Commit

Solving SAVE TO QUERY PROFILE problem (#439)
* Solving SAVE TO QUERY PROFILE problem

 - Reducing the number of save_to_query_profile calls, specifically for the basic functions (a hedged sketch of the removed pattern follows the changed-files summary below)
 - Adding more doc info / correcting some doc errors

* Update vdataframe.py

* Update ci.yaml

* Update ci.yaml

* test

* Update setup.py
oualib committed Dec 10, 2022
1 parent a784794 commit 532d0f7
Showing 8 changed files with 8 additions and 229 deletions.
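Most of the 229 deletions below follow a single pattern: a profiling preamble at the top of frequently used helpers that called save_to_query_profile with the function's name, module path, and arguments before doing any real work. The sketch below illustrates that pattern; vColumnSketch, the stub profiler, and the print call are invented for illustration and are not verticapy code.

# Hedged sketch of the pattern this commit removes; names and bodies are
# illustrative, not copied from verticapy.

def save_to_query_profile(name: str, path: str, json_dict: dict) -> None:
    # Stand-in for verticapy's profiler hook: it records the function name,
    # its module path, and the call arguments for the query profile table.
    print(f"profile: {path}.{name} {json_dict}")

class vColumnSketch:
    def __init__(self, values):
        self.values = list(values)

    def head(self, limit: int = 5):
        # Before this commit: every call to a basic helper logged itself first,
        # adding profiling work to hot paths such as head/tail/distinct.
        save_to_query_profile(
            name="head",
            path="vcolumn.vColumn",
            json_dict={"limit": limit},
        )
        # After this commit the preamble above is simply deleted and the helper
        # goes straight to its real work (see the vcolumn.py hunks below).
        return self.values[:limit]

vColumnSketch(range(10)).head(3)  # one profiling round-trip per call before the fix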
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8]
python-version: [3.7, 3.8, 3.9]

steps:
- name: Check out repository
1 change: 1 addition & 0 deletions setup.py
@@ -57,6 +57,7 @@
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Topic :: Database",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
2 changes: 1 addition & 1 deletion tox.ini
@@ -1,5 +1,5 @@
[tox]
envlist = py36,py37,py38
envlist = py37,py38,py39

[testenv]
passenv = *
2 changes: 1 addition & 1 deletion verticapy/connect.py
@@ -494,7 +494,7 @@ def set_external_connection(cid: str, rowset: int = 500, symbol: str = "$"):
SQLFetch() cycle.
symbol: str, optional
One of the following:
"$", "€", "£", "%", "@", "&", "§", "%", "?", "!"
"$", "€", "£", "%", "@", "&", "§", "?", "!"
A special character, to identify the connection.
For example, if the symbol is '$', you can call external tables
with the input cid by writing $$$QUERY$$$, where QUERY represents
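The connect.py change above only removes a duplicated "%" from the list of allowed symbols. For readers unfamiliar with the feature, a hedged usage sketch follows; the connection identifier "external_dsn" is hypothetical, and the routing comment is based only on the docstring shown above.

# Hedged usage sketch; assumes a configured verticapy environment and an
# existing external connection identifier ("external_dsn" is hypothetical).
from verticapy.connect import set_external_connection

# Register the external connection and identify it with the symbol '$'.
set_external_connection(cid="external_dsn", rowset=500, symbol="$")

# Per the docstring above, external tables on that connection can then be
# referenced by wrapping the query in the chosen symbol, e.g. $$$QUERY$$$.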
10 changes: 0 additions & 10 deletions verticapy/learn/tools.py
@@ -83,16 +83,6 @@ def does_model_exist(
1 if the model exists and is native.
2 if the model exists and is not native.
"""
# Saving information to the query profile table
save_to_query_profile(
name="does_model_exist",
path="learn.tools",
json_dict={
"name": name,
"raise_error": raise_error,
"return_model_type": return_model_type,
},
)
# -#
check_types([("name", name, [str])])
model_type = None
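The does_model_exist hunk above drops the same profiling preamble; the function's behavior is unchanged. A hedged call sketch, with a hypothetical model name:

# Hedged call sketch ("public.my_model" is a hypothetical model name; a live
# database connection is required for this to run).
from verticapy.learn.tools import does_model_exist

status = does_model_exist(name="public.my_model", return_model_type=False)
# Per the docstring fragment above: 1 means the model exists and is native,
# 2 means it exists but is not native.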
92 changes: 0 additions & 92 deletions verticapy/utilities.py
@@ -195,12 +195,6 @@ def create_schema(
bool
True if the schema was successfully created, False otherwise.
"""
# Saving information to the query profile table
save_to_query_profile(
name="create_schema",
path="utilities",
json_dict={"schema": schema, "raise_error": raise_error,},
)
# -#
check_types(
[("schema", schema, [str]), ("raise_error", raise_error, [bool]),]
@@ -254,20 +248,6 @@ def create_table(
bool
True if the table was successfully created, False otherwise.
"""
# Saving information to the query profile table
save_to_query_profile(
name="create_table",
path="utilities",
json_dict={
"table_name": table_name,
"schema": schema,
"dtype": dtype,
"genSQL": genSQL,
"temporary_table": temporary_table,
"temporary_local_table": temporary_local_table,
"raise_error": raise_error,
},
)
# -#
check_types(
[
@@ -316,10 +296,6 @@ def create_verticapy_schema():
---------------------------------------------------------------------------
Creates a schema named 'verticapy' used to store VerticaPy extended models.
"""
# Saving information to the query profile table
save_to_query_profile(
name="create_verticapy_schema", path="utilities", json_dict={},
)
sql = "CREATE SCHEMA IF NOT EXISTS verticapy;"
executeSQL(sql, title="Creating VerticaPy schema.")
sql = """CREATE TABLE IF NOT EXISTS verticapy.models (model_name VARCHAR(128),
@@ -365,12 +341,6 @@ def drop(name: str = "", method: str = "auto", raise_error: bool = False, **kwds
bool
True if the relation was dropped, False otherwise.
"""
# Saving information to the query profile table
save_to_query_profile(
name="drop",
path="utilities",
json_dict={"name": name, "method": method, "raise_error": raise_error,},
)
# -#
if "relation_type" in kwds and method == "auto":
method = kwds["relation_type"]
@@ -594,18 +564,6 @@ def get_data_types(
list of tuples
The list of the different columns and their respective type.
"""
# Saving information to the query profile table
save_to_query_profile(
name="get_data_types",
path="utilities",
json_dict={
"expr": expr,
"column": column,
"table_name": table_name,
"schema": schema,
"usecols": usecols,
},
)
# -#
check_types(
[
@@ -987,10 +945,6 @@ def isvmap(
bool
True if the column is a VMap.
"""
# Saving information to the query profile table
save_to_query_profile(
name="isvmap", path="utilities", json_dict={"expr": expr, "column": column,},
)
# -#
from verticapy import vDataFrame
from verticapy.connect import current_cursor
@@ -1279,29 +1233,6 @@ def pcsv(
read_csv : Ingests a CSV file into the Vertica database.
read_json : Ingests a JSON file into the Vertica database.
"""
# Saving information to the query profile table
save_to_query_profile(
name="pcsv",
path="utilities",
json_dict={
"path": path,
"sep": sep,
"header": header,
"header_names": header_names,
"na_rep": na_rep,
"quotechar": quotechar,
"escape": escape,
"record_terminator": record_terminator,
"trim": trim,
"omit_empty_keys": omit_empty_keys,
"reject_on_duplicate": reject_on_duplicate,
"reject_on_empty_key": reject_on_empty_key,
"reject_on_materialized_type_error": reject_on_materialized_type_error,
"ingest_local": ingest_local,
"flex_name": flex_name,
},
)
# -#
if record_terminator == "\n":
record_terminator = "\\n"
if not (flex_name):
@@ -1388,13 +1319,6 @@ def pjson(path: str, ingest_local: bool = True):
read_csv : Ingests a CSV file into the Vertica database.
read_json : Ingests a JSON file into the Vertica database.
"""
# Saving information to the query profile table
save_to_query_profile(
name="pjson",
path="utilities",
json_dict={"path": path, "ingest_local": ingest_local,},
)
# -#
flex_name = gen_tmp_name(name="flex")[1:-1]
executeSQL(
f"CREATE FLEX LOCAL TEMP TABLE {flex_name}(x int) ON COMMIT PRESERVE ROWS;",
@@ -2940,12 +2864,6 @@ def set_option(option: str, value: Union[bool, int, str] = None):
value: object, optional
New value of option.
"""
# Saving information to the query profile table
save_to_query_profile(
name="set_option",
path="utilities",
json_dict={"option": option, "value": value,},
)
# -#
if isinstance(option, str):
option = option.lower()
@@ -3639,12 +3557,6 @@ def to_tablesample(
--------
tablesample : Object in memory created for rendering purposes.
"""
# Saving information to the query profile table
save_to_query_profile(
name="to_tablesample",
path="utilities",
json_dict={"query": query, "title": title, "max_columns": max_columns,},
)
# -#
check_types(
[("query", query, [str]), ("max_columns", max_columns, [int]),]
@@ -3797,10 +3709,6 @@ def version(condition: list = []):
List containing the version information.
[MAJOR, MINOR, PATCH, POST]
"""
# Saving information to the query profile table
save_to_query_profile(
name="version", path="utilities", json_dict={"condition": condition,},
)
# -#
check_types([("condition", condition, [list])])
if condition:
29 changes: 0 additions & 29 deletions verticapy/vcolumn.py
@@ -366,10 +366,6 @@ def add_copy(self, name: str):
--------
vDataFrame.eval : Evaluates a customized expression.
"""
# Saving information to the query profile table
save_to_query_profile(
name="add_copy", path="vcolumn.vColumn", json_dict={"name": name,},
)
# -#
check_types([("name", name, [str])])
name = quote_ident(name.replace('"', "_"))
@@ -1869,11 +1865,6 @@ def distinct(self, **kwargs):
--------
vDataFrame.topk : Returns the vColumn most occurent elements.
"""
# Saving information to the query profile table
save_to_query_profile(
name="distinct", path="vcolumn.vColumn", json_dict={},
)
# -#
if "agg" not in kwargs:
query = "SELECT /*+LABEL('vColumn.distinct')*/ {0} AS {1} FROM {2} WHERE {1} IS NOT NULL GROUP BY {1} ORDER BY {1}".format(
bin_spatial_to_str(self.category(), self.alias),
@@ -2641,11 +2632,6 @@ def head(self, limit: int = 5):
--------
vDataFrame[].tail : Returns the a part of the vColumn.
"""
# Saving information to the query profile table
save_to_query_profile(
name="head", path="vcolumn.vColumn", json_dict={"limit": limit,},
)
# -#
return self.iloc(limit=limit)

# ---#
@@ -2754,12 +2740,6 @@ def iloc(self, limit: int = 5, offset: int = 0):
vDataFrame[].head : Returns the head of the vColumn.
vDataFrame[].tail : Returns the tail of the vColumn.
"""
# Saving information to the query profile table
save_to_query_profile(
name="iloc",
path="vcolumn.vColumn",
json_dict={"limit": limit, "offset": offset,},
)
# -#
check_types([("limit", limit, [int, float]), ("offset", offset, [int, float])])
if offset < 0:
@@ -3760,10 +3740,6 @@ def numh(self, method: str = "auto"):
float
optimal bar width.
"""
# Saving information to the query profile table
save_to_query_profile(
name="numh", path="vcolumn.vColumn", json_dict={"method": method,},
)
# -#
check_types(
[("method", method, ["sturges", "freedman_diaconis", "fd", "auto"])]
@@ -4758,11 +4734,6 @@ def tail(self, limit: int = 5):
--------
vDataFrame[].head : Returns the head of the vColumn.
"""
# Saving information to the query profile table
save_to_query_profile(
name="tail", path="vcolumn.vColumn", json_dict={"limit": limit,},
)
# -#
return self.iloc(limit=limit, offset=-1)

# ---#
(diff for verticapy/vdataframe.py not shown)