mirror of https://github.com/citusdata/citus.git
add colocate_with as optional config
parent f857d7fbb4
commit ad3b6ed164
@@ -53,6 +53,7 @@ targetTables: <Table[]>
             name: <string>
             type: <string>
       distinctCopyCount: <int>
+      colocateWith: Optional<string>
 ```

 Explanation:

@@ -72,6 +73,7 @@ targetTables: "array of tables that will be used in generated queries"
             name: "name of column"
             type: "name of data type of column(only support 'int' now)"
       distinctCopyCount: "how many tables with the same configuration we should create(only by changing full name, still using the same name prefix)"
+      colocateWith: "colocated_with parameter"
 ```


@@ -73,6 +73,8 @@ def removeFailedQueryOutputFromFiles(distQueryOutFile, localQueryOutFile):
     acceptableErrors = [
         "ERROR: complex joins are only supported when all distributed tables are co-located and joined on their distribution columns",
         "ERROR: recursive complex joins are only supported when all distributed tables are co-located and joined on their distribution columns",
+        "ERROR: cannot push down this subquery",
+        "ERROR: cannot perform a lateral outer join when a distributed subquery references complex subqueries, CTEs or local tables",
     ]
     failedDistQueryIds = findFailedQueriesFromFile(distQueryOutFile, acceptableErrors)
     removeFailedQueryOutputFromFile(distQueryOutFile, failedDistQueryIds)
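Note: with co-location now optional, the generator can legitimately produce distributed queries that fail with the two errors added above; those failures are expected, so their output is stripped before the distributed results are diffed against the local run. A rough, illustrative sketch of that kind of filter (the helper name and the output-file layout here are assumptions, not the repo's actual findFailedQueriesFromFile):

```
# Illustrative sketch only, not the repo's implementation. Assumes each query's
# output block in the .out file starts with a marker line like "-- queryId: 3".
def findAcceptableFailures(outFilePath, acceptableErrors):
    failedQueryIds = set()
    currentQueryId = None
    with open(outFilePath) as outFile:
        for line in outFile:
            if line.startswith("-- queryId:"):
                currentQueryId = int(line.split(":", 1)[1])
            elif currentQueryId is not None and any(
                err in line for err in acceptableErrors
            ):
                failedQueryIds.add(currentQueryId)
    return failedQueryIds
```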
@@ -36,18 +36,19 @@ targetTables:
             name: id
             type: int
       distinctCopyCount: 2
-  # - Table:
-  #     name: single_dist
-  #     citusType: SINGLE_SHARD_DISTRIBUTED
-  #     maxAllowedUseOnQuery: 10
-  #     rowCount: 10
-  #     nullRate: 0.1
-  #     duplicateRate: 0.1
-  #     columns:
-  #       - Column:
-  #           name: id
-  #           type: int
-  #     distinctCopyCount: 2
+  - Table:
+      name: single_dist
+      citusType: SINGLE_SHARD_DISTRIBUTED
+      maxAllowedUseOnQuery: 10
+      rowCount: 10
+      nullRate: 0.1
+      duplicateRate: 0.1
+      columns:
+        - Column:
+            name: id
+            type: int
+      distinctCopyCount: 2
+      colocateWith: none
   - Table:
       name: ref
       citusType: REFERENCE
@@ -49,6 +49,7 @@ def parseTable(targetTableDict):
         col = parseColumn(columnDict)
         columns.append(col)
     distinctCopyCount = targetTableDict["distinctCopyCount"]
+    colocateWith = targetTableDict.get("colocateWith", "default")
     return Table(
         name,
         citusType,

@@ -58,6 +59,7 @@ def parseTable(targetTableDict):
         duplicateRate,
         columns,
         distinctCopyCount,
+        colocateWith,
     )


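The .get() fallback means configs that omit colocateWith keep Citus's default co-location behavior; only tables that set it explicitly (for example to none) opt out. A standalone illustration of the fallback, using made-up dict literals:

```
# Made-up table entries, only to show the parsing fallback used above.
withoutKey = {"name": "dist", "distinctCopyCount": 2}
print(withoutKey.get("colocateWith", "default"))  # -> default

withKey = {"name": "single_dist", "colocateWith": "none"}
print(withKey.get("colocateWith", "default"))  # -> none
```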
@@ -35,51 +35,23 @@ def getTableData():

 def _genOverlappingData(tableName, startVal, rowCount):
     """returns string to fill table with [startVal,startVal+rowCount] range of integers"""
-    dataGenerationSql = ""
-    dataGenerationSql += "INSERT INTO " + tableName
-    dataGenerationSql += (
-        " SELECT i FROM generate_series("
-        + str(startVal)
-        + ","
-        + str(startVal + rowCount)
-        + ") i;"
-    )
-    return dataGenerationSql
+    return f"INSERT INTO {tableName} SELECT i FROM generate_series({startVal}, {startVal + rowCount}) i;"


 def _genNullData(tableName, nullCount):
     """returns string to fill table with NULLs"""
-    dataGenerationSql = ""
-    dataGenerationSql += "INSERT INTO " + tableName
-    dataGenerationSql += (
-        " SELECT NULL FROM generate_series(0," + str(nullCount) + ") i;"
+    return (
+        f"INSERT INTO {tableName} SELECT NULL FROM generate_series(0, {nullCount}) i;"
     )
-    return dataGenerationSql


 def _genDupData(tableName, dupRowCount):
     """returns string to fill table with duplicate integers which are fetched from given table"""
-    dataGenerationSql = ""
-    dataGenerationSql += "INSERT INTO " + tableName
-    dataGenerationSql += (
-        " SELECT * FROM "
-        + tableName
-        + " ORDER BY "
-        + getConfig().commonColName
-        + " LIMIT "
-        + str(dupRowCount)
-        + ";"
-    )
-    return dataGenerationSql
+    return f"INSERT INTO {tableName} SELECT * FROM {tableName} ORDER BY {getConfig().commonColName} LIMIT {dupRowCount};"


 def _genNonOverlappingData(tableName, startVal, tableIdx):
     """returns string to fill table with different integers for given table"""
     startVal = startVal + tableIdx * 20
     endVal = startVal + 20
-    dataGenerationSql = ""
-    dataGenerationSql += "INSERT INTO " + tableName
-    dataGenerationSql += (
-        " SELECT i FROM generate_series(" + str(startVal) + "," + str(endVal) + ") i;"
-    )
-    return dataGenerationSql
+    return f"INSERT INTO {tableName} SELECT i FROM generate_series({startVal}, {endVal}) i;"
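These data-generation changes are a pure refactor from string concatenation to f-strings; the emitted INSERT statements are unchanged. For example, with hypothetical inputs:

```
# Hypothetical values, only to show the SQL produced by the new f-string.
tableName, startVal, rowCount = "dist_0", 0, 10
print(f"INSERT INTO {tableName} SELECT i FROM generate_series({startVal}, {startVal + rowCount}) i;")
# INSERT INTO dist_0 SELECT i FROM generate_series(0, 10) i;
```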
@@ -17,10 +17,8 @@ def getTableDDLs():

 def _genTableDDL(table):
     ddl = ""
-    ddl += "DROP TABLE IF EXISTS " + table.name + ";"
-    ddl += "\n"
-
-    ddl += "CREATE TABLE " + table.name + "("
+    ddl += f"DROP TABLE IF EXISTS {table.name};\n"
+    ddl += f"CREATE TABLE {table.name}("
     for column in table.columns[:-1]:
         ddl += _genColumnDDL(column)
         ddl += ",\n"

@@ -29,28 +27,13 @@ def _genTableDDL(table):
     ddl += ");\n"

     if isTableHashDistributed(table):
-        ddl += (
-            "SELECT create_distributed_table("
-            + "'"
-            + table.name
-            + "','"
-            + getConfig().commonColName
-            + "'"
-            + ");"
-        )
-        ddl += "\n"
+        ddl += f"SELECT create_distributed_table('{table.name}','{getConfig().commonColName}', colocate_with=>'{table.colocateWith}');\n"
     if isTableSingleShardDistributed(table):
-        ddl += "SELECT create_distributed_table(" + "'" + table.name + "'" ",NULL);"
-        ddl += "\n"
+        ddl += f"SELECT create_distributed_table('{table.name}', NULL, colocate_with=>'{table.colocateWith}');\n"
     elif isTableReference(table):
-        ddl += "SELECT create_reference_table(" + "'" + table.name + "'" + ");"
-        ddl += "\n"
+        ddl += f"SELECT create_reference_table('{table.name}');\n"
     return ddl


 def _genColumnDDL(column):
-    ddl = ""
-    ddl += column.name
-    ddl += " "
-    ddl += column.type
-    return ddl
+    return f"{column.name} {column.type}"
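With colocateWith threaded through to the DDL, hash-distributed and single-shard tables now pass colocate_with explicitly to create_distributed_table (using PostgreSQL's named-argument => syntax), while reference tables are unchanged. A rough example of the emitted DDL, with made-up names:

```
# Made-up names, only to show the DDL string the f-strings above produce.
tableName, commonColName, colocateWith = "single_dist_0", "id", "none"
print(f"SELECT create_distributed_table('{tableName}', NULL, colocate_with=>'{colocateWith}');")
# SELECT create_distributed_table('single_dist_0', NULL, colocate_with=>'none');
```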
@@ -39,6 +39,7 @@ class Table:
         duplicateRate,
         columns,
         distinctCopyCount,
+        colocateWith,
     ):
         self.name = name
         self.citusType = citusType

@@ -48,6 +49,7 @@ class Table:
         self.duplicateRate = duplicateRate
         self.columns = columns
         self.distinctCopyCount = distinctCopyCount
+        self.colocateWith = colocateWith


 class Column:
@@ -114,15 +114,15 @@ class GeneratorContext:
     def randomCteName(self):
         """returns a randomly selected cte name"""
         randCteRef = random.randint(0, self.currentCteCount - 1)
-        return " cte_" + str(randCteRef)
+        return f" cte_{randCteRef}"

     def curAlias(self):
         """returns current alias name to be used for the current table"""
-        return " table_" + str(self.totalRteCount)
+        return f" table_{self.totalRteCount}"

     def curCteAlias(self):
         """returns current alias name to be used for the current cte"""
-        return " cte_" + str(self.currentCteCount)
+        return f" cte_{self.currentCteCount}"

     def hasAnyCte(self):
         """returns if context has any cte"""
@@ -153,7 +153,7 @@ class GeneratorContext:
         # do not enforce per table rte limit if we are inside cte
         if self.insideCte:
             rteName = random.choice(getAllTableNames())
-            return " " + rteName + " "
+            return f" {rteName} "

         while True:
             # keep trying to find random table by eliminating the ones which hit table limit
@@ -173,7 +173,7 @@ class GeneratorContext:

         # increment rte count for the table name
         self.perTableRtes[rteName] += 1
-        return " " + rteName + " "
+        return f" {rteName} "


 def newQuery():
@@ -206,8 +206,7 @@ def _genQuery(genCtx):
         and not genCtx.usedAvg
     ):
         genCtx.usedAvg = True
-        query += "SELECT "
-        query += "count(*), avg(avgsub." + getConfig().commonColName + ") FROM "
+        query += f"SELECT count(*), avg(avgsub.{getConfig().commonColName}) FROM "
         query += _genSubqueryRte(genCtx)
         query += " AS avgsub"
     else:
@@ -226,35 +225,24 @@ def _genQuery(genCtx):

 def _genOrderBy(genCtx):
     # 'ORDER BY' DistColName
-    query = ""
-    query += " ORDER BY "
-    query += getConfig().commonColName + " "
-    return query
+    return f" ORDER BY {getConfig().commonColName} "


 def _genLimit(genCtx):
     # 'LIMIT' 'random()'
-    query = ""
-    query += " LIMIT "
     (fromVal, toVal) = getConfig().limitRange
-    query += str(random.randint(fromVal, toVal))
-    return query
+    return f" LIMIT {random.randint(fromVal, toVal)}"


 def _genSelectExpr(genCtx):
     # 'SELECT' 'curAlias()' '.' DistColName
-    query = ""
-    query += " SELECT "
     commonColName = getConfig().commonColName
-    query += genCtx.curAlias() + "." + commonColName + " "
-
-    return query
+    return f" SELECT {genCtx.curAlias()}.{commonColName} "


 def _genFromExpr(genCtx):
     # 'FROM' (Rte JoinList JoinOp Rte Using || RteList) ['WHERE' 'nextRandomAlias()' '.' DistColName RestrictExpr]
-    query = ""
-    query += " FROM "
+    query = " FROM "

     if shouldSelectThatBranch():
         query += _genRte(genCtx)
@@ -267,8 +255,7 @@ def _genFromExpr(genCtx):

     alias = genCtx.removeLastAlias()
     if shouldSelectThatBranch():
-        query += " WHERE "
-        query += alias + "." + getConfig().commonColName
+        query += f" WHERE {alias}.{getConfig().commonColName}"
         query += _genRestrictExpr(genCtx)
     return query

@@ -353,9 +340,7 @@ def _genJoinList(genCtx):

 def _genUsing(genCtx):
     # 'USING' '(' DistColName ')'
-    query = ""
-    query += " USING (" + getConfig().commonColName + " ) "
-    return query
+    return f" USING ({getConfig().commonColName}) "


 def _genRte(genCtx):
@@ -392,7 +377,7 @@ def _genRte(genCtx):
         query += _genCteRte(genCtx)
     elif rteType == RTEType.VALUES:
         query += _genValuesRte(genCtx)
-        modifiedAlias = alias + "(" + getConfig().commonColName + ") "
+        modifiedAlias = f"{alias}({getConfig().commonColName}) "
     else:
         raise BaseException("unknown RTE type")

@@ -428,7 +413,5 @@ def _genCteRte(genCtx):

 def _genValuesRte(genCtx):
     # '( VALUES(random()) )'
-    query = ""
     (fromVal, toVal) = getConfig().dataRange
-    query += " ( VALUES(" + str(random.randint(fromVal, toVal)) + " ) ) "
-    return query
+    return f" ( VALUES({random.randint(fromVal, toVal)}) ) "
@@ -25,7 +25,7 @@ def randomRteType():
 def randomJoinOp():
     """returns a randomly selected JoinOp given at config"""
     joinTypes = getConfig().targetJoinTypes
-    return " " + random.choice(joinTypes).name + " JOIN"
+    return f" {random.choice(joinTypes).name} JOIN"


 def randomRestrictOp():
@@ -42,4 +42,4 @@ def randomRestrictOp():
     else:
         raise BaseException("Unknown restrict op")

-    return " " + opText + " "
+    return f" {opText} "