2 changes: 1 addition & 1 deletion pyTigerGraph/__init__.py
@@ -2,6 +2,6 @@
from pyTigerGraph.pytgasync.pyTigerGraph import AsyncTigerGraphConnection
from pyTigerGraph.common.exception import TigerGraphException

__version__ = "1.9.0"
__version__ = "1.9.1"

__license__ = "Apache 2"
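
A quick check that the upgrade took effect (a minimal sketch; assumes pyTigerGraph is importable in the active environment):

    import pyTigerGraph

    print(pyTigerGraph.__version__)   # "1.9.1" for this release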
29 changes: 25 additions & 4 deletions pyTigerGraph/ai/ai.py
@@ -247,7 +247,7 @@ def initializeServer(self, server="graphrag"):
url = f"{self.nlqs_host}/{self.conn.graphname}/{self.server_mode}/initialize"
return self.conn._req("POST", url, authMode="pwd", resKey=None)

def createDocumentIngest(self, data_source, data_source_config, loader_config, file_format):
def createDocumentIngest(self, data_source="", data_source_config={}, loader_config={}, file_format=""):
""" Create a document ingest.
Args:
data_source (str):
@@ -271,7 +271,7 @@ def createDocumentIngest(self, data_source, data_source_config, loader_config, f
url = f"{self.nlqs_host}/{self.conn.graphname}/{self.server_mode}/create_ingest"
return self.conn._req("POST", url, authMode="pwd", data=data, jsonData=True, resKey=None)
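
With the new defaults every argument of createDocumentIngest is optional. A minimal sketch of a call is below; the host, graph name, and especially the keys inside data_source_config and loader_config are illustrative assumptions, not values defined by this PR, and the GraphRAG/CoPilot host is assumed to already be registered on the connection (e.g. via configureInquiryAIHost).

    from pyTigerGraph import TigerGraphConnection

    # Placeholder host, graph, and credentials.
    conn = TigerGraphConnection(host="https://example.tgcloud.io",
                                graphname="Docs",
                                username="tigergraph", password="tigergraph")

    # Unused arguments can simply be omitted now that they default to empty values.
    ingest = conn.ai.createDocumentIngest(
        data_source="s3",                                    # assumed source type
        data_source_config={"aws_access_key": "...",         # illustrative keys only
                            "aws_secret_key": "..."},
        loader_config={"doc_id_field": "doc_id",             # illustrative keys only
                       "content_field": "content"},
        file_format="json",
    )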

def runDocumentIngest(self, load_job_id, data_source_id, data_path, data_source="remote"):
def runDocumentIngest(self, load_job_id="", data_source_id="", data_path="", data_source="", load_job_info: dict = None):
""" Run a document ingest.
Args:
load_job_id (str):
@@ -280,17 +280,38 @@ def runDocumentIngest(self, load_job_id, data_source_id, data_path, data_source=
The data source ID of the document ingest.
data_path (str):
The data path of the document ingest.
data_source (str):
The data source of the document ingest.
load_job_info (dict):
The information of the load job.
Returns:
JSON response from the document ingest.
"""
if data_source.lower() == "local" or data_path.startswith(("/", ".", "~")) :
if load_job_info:
if not load_job_id and "load_job_id" in load_job_info:
load_job_id = load_job_info["load_job_id"]
if not data_source_id and "data_source_id" in load_job_info:
data_source_id = load_job_info["data_source_id"]
if not data_path and "data_path" in load_job_info:
data_path = load_job_info["data_path"]
if not data_source and "data_source" in load_job_info:
data_source = load_job_info["data_source"]

if not load_job_id or not data_path or not data_source_id and not load_job_info:
raise ValueError("load_job_id and data_path are required, one of data_source_id or load_job_info must be provided.")

if data_source.lower() == "local" and data_path.startswith(("/", ".", "~")) :
return self.conn.runLoadingJobWithFile(data_path, data_source_id, load_job_id)
else:
data = {
"load_job_id": load_job_id,
"data_source_id": data_source_id,
"file_path": data_path
}
if load_job_info:
data["load_job_info"] = load_job_info
if data_source_id:
data["data_source_id"] = data_source_id

url = f"{self.nlqs_host}/{self.conn.graphname}/{self.server_mode}/ingest"
return self.conn._req("POST", url, authMode="pwd", data=data, jsonData=True, resKey=None)
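
A sketch of the two ways the reworked runDocumentIngest can now be driven: explicit keyword arguments as before, or a single load_job_info dict whose keys mirror what the method reads. The job name, data source ID, and paths are placeholders, and conn is the connection from the sketch above.

    # 1) Explicit arguments, as before.
    res = conn.ai.runDocumentIngest(
        load_job_id="load_documents_content_json",   # placeholder loading job name
        data_source_id="my_s3_source",               # placeholder data source ID
        data_path="s3://my-bucket/docs/",            # remote path -> sent to the ingest endpoint
    )

    # 2) Or bundle everything into load_job_info; any keyword argument left empty
    #    is filled from this dict before the request is built.
    job_info = {
        "load_job_id": "load_documents_content_json",
        "data_source_id": "my_s3_source",
        "data_path": "s3://my-bucket/docs/",
        "data_source": "remote",
    }
    res = conn.ai.runDocumentIngest(load_job_info=job_info)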

14 changes: 8 additions & 6 deletions pyTigerGraph/common/auth.py
@@ -40,7 +40,7 @@ def _parse_create_secret(response: str, alias: str = "", withAlias: bool = False
if not withAlias:
if logger.level == logging.DEBUG:
logger.debug("return: " + str(secret))
logger.info("exit: createSecret (withAlias")
logger.debug("exit: createSecret (withAlias")

return secret

@@ -49,7 +49,7 @@ def _parse_create_secret(response: str, alias: str = "", withAlias: bool = False

if logger.level == logging.DEBUG:
logger.debug("return: " + str(ret))
logger.info("exit: createSecret (alias)")
logger.debug("exit: createSecret (alias)")

return ret

@@ -61,7 +61,7 @@ def _parse_create_secret(response: str, alias: str = "", withAlias: bool = False

def _prep_token_request(restppUrl: str,
gsUrl: str,
graphname: str,
graphname: str = None,
version: str = None,
secret: str = None,
lifetime: int = None,
@@ -83,7 +83,7 @@ def _prep_token_request(restppUrl: str,
else:
method = "POST"
url = gsUrl + "/gsql/v1/tokens" # used for TG 4.x
data = {"graph": graphname}
data = {"graph": graphname} if graphname else {}

# alt_url and alt_data used to construct the method and url for functions run in TG version 3.x
alt_url = restppUrl+"/requesttoken" # used for TG 3.x
@@ -113,7 +113,9 @@ def _parse_token_response(response: dict,
mainVer: int,
base64_credential: str) -> Tuple[Union[Tuple[str, str], str], dict]:
if not response.get("error"):
token = response["token"]
# Note that /requesttoken has a slightly different response when using a username-password pair.
# See https://docs.tigergraph.com/tigergraph-server/3.10/api/built-in-endpoints#_request_a_token
token = response.get("results", response)["token"]
if setToken:
apiToken = token
authHeader = {'Authorization': "Bearer " + apiToken}
@@ -125,7 +127,7 @@ def _parse_token_response(response: dict,
if response.get("expiration"):
# On >=4.1 the format for the date of expiration changed. Convert back to old format
# Can't use self._versionGreaterThan4_0 since you need a token for that
if mainVer == 4:
if mainVer >= 4:
return (token, response.get("expiration")), authHeader
else:
return (token, response.get("expiration"), \
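
With graphname now optional in _prep_token_request and the token read from either the top level or the "results" wrapper, a username/password connection can request a token without naming a graph on TigerGraph 4.x. A minimal sketch, with placeholder host and credentials:

    from pyTigerGraph import TigerGraphConnection

    conn = TigerGraphConnection(host="https://example.tgcloud.io",   # placeholder host
                                username="tigergraph", password="tigergraph")
    token = conn.getToken()   # on TG 3.x a secret is still required: conn.getToken(secret)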
23 changes: 13 additions & 10 deletions pyTigerGraph/common/base.py
@@ -33,7 +33,7 @@ def excepthook(type, value, traceback):


class PyTigerGraphCore(object):
def __init__(self, host: str = "http://127.0.0.1", graphname: str = "MyGraph",
def __init__(self, host: str = "http://127.0.0.1", graphname: str = "",
gsqlSecret: str = "", username: str = "tigergraph", password: str = "tigergraph",
tgCloud: bool = False, restppPort: Union[int, str] = "9000",
gsPort: Union[int, str] = "14240", gsqlVersion: str = "", version: str = "",
@@ -85,7 +85,7 @@ def __init__(self, host: str = "http://127.0.0.1", graphname: str = "MyGraph",
TigerGraphException: In case on invalid URL scheme.

"""
logger.info("entry: __init__")
logger.debug("entry: __init__")
if logger.level == logging.DEBUG:
logger.debug("params: " + self._locals(locals()))

@@ -110,6 +110,7 @@ def __init__(self, host: str = "http://127.0.0.1", graphname: str = "MyGraph",
self.base64_credential = base64.b64encode(
"{0}:{1}".format(self.username, self.password).encode("utf-8")).decode("utf-8")

# Detect auth mode automatically by checking if jwtToken or apiToken is provided
self.authHeader = self._set_auth_header()

# TODO Eliminate version and use gsqlVersion only, meaning TigerGraph server version
@@ -179,7 +180,7 @@ def __init__(self, host: str = "http://127.0.0.1", graphname: str = "MyGraph",
self.restppPort = restppPort
self.restppUrl = self.host + ":" + self.restppPort

self.gsPort = ""
self.gsPort = gsPort
if self.tgCloud and (gsPort == "14240" or gsPort == "443"):
self.gsPort = sslPort
self.gsUrl = self.host + ":" + sslPort
@@ -211,7 +212,7 @@ def __init__(self, host: str = "http://127.0.0.1", graphname: str = "MyGraph",

self.asynchronous = False

logger.info("exit: __init__")
logger.debug("exit: __init__")

def _set_auth_header(self):
"""Set the authentication header based on available tokens or credentials."""
@@ -276,7 +277,7 @@ def _error_check(self, res: dict) -> bool:
return False

def _prep_req(self, authMode, headers, url, method, data):
logger.info("entry: _req")
logger.debug("entry: _prep_req")
if logger.level == logging.DEBUG:
logger.debug("params: " + self._locals(locals()))

@@ -300,14 +301,14 @@ def _prep_req(self, authMode, headers, url, method, data):
self.authHeader = {
'Authorization': 'Basic {0}'.format(self.base64_credential)}
_headers = self.authHeader
authMode = 'pwd'

if authMode == "pwd":
self.authMode = "pwd"
else:
if self.jwtToken:
_headers = {'Authorization': "Bearer " + self.jwtToken}
else:
_headers = {'Authorization': 'Basic {0}'.format(
self.base64_credential)}
self.authMode = "pwd"

if headers:
_headers.update(headers)
@@ -328,10 +329,12 @@ def _prep_req(self, authMode, headers, url, method, data):
verify = True

_headers.update({"X-User-Agent": "pyTigerGraph"})
logger.debug("exit: _prep_req")

return _headers, _data, verify

def _parse_req(self, res, jsonResponse, strictJson, skipCheck, resKey):
logger.debug("entry: _parse_req")
if jsonResponse:
try:
res = json.loads(res.text, strict=strictJson)
@@ -345,7 +348,7 @@ def _parse_req(self, res, jsonResponse, strictJson, skipCheck, resKey):
if not resKey:
if logger.level == logging.DEBUG:
logger.debug("return: " + str(res))
logger.info("exit: _req (no resKey)")
logger.debug("exit: _parse_req (no resKey)")

return res

@@ -354,7 +357,7 @@ def _parse_req(self, res, jsonResponse, strictJson, skipCheck, resKey):
logger.info("Removed _ from resKey")
if logger.level == logging.DEBUG:
logger.debug("return: " + str(res[resKey]))
logger.info("exit: _req (resKey)")
logger.debug("exit: _parse_req (resKey)")

return res[resKey]

14 changes: 7 additions & 7 deletions pyTigerGraph/common/edge.py
@@ -25,7 +25,7 @@ def _parse_get_edge_source_vertex_type(edgeTypeDetails):

if logger.level == logging.DEBUG:
logger.debug("return: " + str(ret))
logger.info("exit: getEdgeSourceVertexType (single source)")
logger.debug("exit: getEdgeSourceVertexType (single source)")

return ret

@@ -38,7 +38,7 @@ def _parse_get_edge_source_vertex_type(edgeTypeDetails):

if logger.level == logging.DEBUG:
logger.debug("return: " + str(vts))
logger.info("exit: getEdgeSourceVertexType (multi source)")
logger.debug("exit: getEdgeSourceVertexType (multi source)")

return vts
else:
@@ -57,7 +57,7 @@ def _parse_get_edge_target_vertex_type(edgeTypeDetails):

if logger.level == logging.DEBUG:
logger.debug("return: " + str(ret))
logger.info("exit: getEdgeTargetVertexType (single target)")
logger.debug("exit: getEdgeTargetVertexType (single target)")

return ret

@@ -70,7 +70,7 @@ def _parse_get_edge_target_vertex_type(edgeTypeDetails):

if logger.level == logging.DEBUG:
logger.debug("return: " + str(vts))
logger.info("exit: getEdgeTargetVertexType (multi target)")
logger.debug("exit: getEdgeTargetVertexType (multi target)")

return vts
else:
@@ -126,7 +126,7 @@ def _parse_get_edge_count_from(res, edgeType):

if logger.level == logging.DEBUG:
logger.debug("return: " + str(ret))
logger.info("exit: getEdgeCountFrom (single edge type)")
logger.debug("exit: getEdgeCountFrom (single edge type)")

return ret

@@ -447,7 +447,7 @@ def edgeSetToDataFrame(edgeSet: list,
ID or source and target vertices, and the edge type.

"""
logger.info("entry: edgeSetToDataFrame")
logger.debug("entry: edgeSetToDataFrame")
logger.debug("params: " + str(locals()))

try:
@@ -469,6 +469,6 @@ def edgeSetToDataFrame(edgeSet: list,

if logger.level == logging.DEBUG:
logger.debug("return: " + str(ret))
logger.info("exit: edgeSetToDataFrame")
logger.debug("exit: edgeSetToDataFrame")

return ret
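
For context, a minimal sketch of the edgeSetToDataFrame helper touched above (only its logging changes here); conn is an authenticated TigerGraphConnection, the vertex type, ID, and edge type are placeholder schema names, and pandas must be installed:

    edges = conn.getEdges("Person", "Alice", edgeType="FRIEND_OF")   # placeholder schema
    df = conn.edgeSetToDataFrame(edges, withId=True, withType=False)
    print(df.head())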
2 changes: 1 addition & 1 deletion pyTigerGraph/common/gsql.py
@@ -56,7 +56,7 @@ def clean_res(resp: list) -> str:

if logger.level == logging.DEBUG:
logger.debug("return: " + str(ret))
logger.info("exit: gsql (success)")
logger.debug("exit: gsql (success)")

return string_without_ansi

2 changes: 1 addition & 1 deletion pyTigerGraph/common/loading.py
@@ -15,7 +15,7 @@ def _prep_run_loading_job_with_file(filePath):
return data
except OSError as ose:
logger.error(ose.strerror)
logger.info("exit: runLoadingJobWithFile")
logger.debug("exit: runLoadingJobWithFile")

return None
# TODO Should throw exception instead?
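
This is the helper behind runLoadingJobWithFile, which runDocumentIngest above delegates to for local files. A sketch of the public call, with placeholder file path, filename variable (fileTag), and loading job name; conn is an authenticated TigerGraphConnection:

    res = conn.runLoadingJobWithFile(
        "./docs/content.json",          # placeholder local file
        "DocumentContent",              # placeholder filename variable in the loading job
        "load_documents_content_json",  # placeholder loading job name
    )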
12 changes: 6 additions & 6 deletions pyTigerGraph/common/path.py
@@ -60,7 +60,7 @@ def parse_vertices(vertices: Union[dict, tuple, list]) -> list:
Returns:
A list of vertices in the format required by the path finding endpoints.
"""
logger.info("entry: parseVertices")
logger.debug("entry: parseVertices")
logger.debug("params: " + str(locals))

ret = []
Expand All @@ -78,7 +78,7 @@ def parse_vertices(vertices: Union[dict, tuple, list]) -> list:

if logger.level == logging.DEBUG:
logger.debug("return: " + str(ret))
logger.info("exit: parseVertices")
logger.debug("exit: parseVertices")

return ret

@@ -94,7 +94,7 @@ def parse_filters(filters: Union[dict, tuple, list]) -> list:
Returns:
A list of filters in the format required by the path finding endpoints.
"""
logger.info("entry: parseFilters")
logger.debug("entry: parseFilters")
logger.debug("params: " + str(locals()))

ret = []
@@ -111,11 +111,11 @@ def parse_filters(filters: Union[dict, tuple, list]) -> list:
logger.warning("Invalid filter type or value: " + str(f))

logger.debug("return: " + str(ret))
logger.info("exit: parseFilters")
logger.debug("exit: parseFilters")

return ret

logger.info("entry: _preparePathParams")
logger.debug("entry: _preparePathParams")
logger.debug("params: " + str(locals()))

# Assembling the input payload
@@ -136,6 +136,6 @@ def parse_filters(filters: Union[dict, tuple, list]) -> list:
ret = json.dumps(data)

logger.debug("return: " + str(ret))
logger.info("exit: _preparePathParams")
logger.debug("exit: _preparePathParams")

return ret
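
parse_vertices and parse_filters build the payload for the path-finding endpoints; a sketch of the public call that goes through them, with placeholder vertex types, IDs, and filter expression (conn is an authenticated TigerGraphConnection):

    path = conn.shortestPath(
        ("Person", "Alice"),                        # source vertex as (type, primary id)
        ("Person", "Bob"),                          # target vertex
        maxLength=4,
        edgeFilters=("FRIEND_OF", "since > 2015"),  # (edge type, condition) pair
    )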
9 changes: 5 additions & 4 deletions pyTigerGraph/common/query.py
@@ -54,7 +54,7 @@ def _parse_query_parameters(params: dict) -> str:
"key": [([p_id1, p_id2, ...], "vtype"), ...]
I.e. multiple primary IDs of the same vertex type
"""
logger.info("entry: _parseQueryParameters")
logger.debug("entry: _parseQueryParameters")
logger.debug("params: " + str(params))

ret = ""
@@ -76,7 +76,7 @@ def _parse_query_parameters(params: dict) -> str:
k + "[" + str(i) + "].type=" + vv[1] + "&"
else:
raise TigerGraphException(
"Invalid parameter value: (vertex_primary_id , vertex_type)"
"Invalid parameter value: (vertex_primary_id, vertex_type)"
" was expected.")
else:
ret += k + "=" + _safe_char(vv) + "&"
@@ -86,11 +86,12 @@ def _parse_query_parameters(params: dict) -> str:
_safe_char(v.strftime("%Y-%m-%d %H:%M:%S")) + "&"
else:
ret += k + "=" + _safe_char(v) + "&"
ret = ret[:-1]
if ret:
ret = ret[:-1]

if logger.level == logging.DEBUG:
logger.debug("return: " + str(ret))
logger.info("exit: _parseQueryParameters")
logger.debug("exit: _parseQueryParameters")

return ret
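
_parseQueryParameters serializes a runInstalledQuery params dict into the REST query string, and the added guard only trims the trailing "&" when something was actually serialized. A sketch of the parameter forms it accepts; the query and parameter names are placeholders and conn is an authenticated TigerGraphConnection:

    from datetime import datetime

    params = {
        "maxHops": 3,                                      # plain scalar
        "seed": ("Alice", "Person"),                       # (vertex_primary_id, vertex_type)
        "others": [("Bob", "Person"), ("Eve", "Person")],  # list of typed vertex IDs
        "since": datetime(2024, 1, 1),                     # sent as "YYYY-MM-DD HH:MM:SS"
    }
    res = conn.runInstalledQuery("my_query", params)       # placeholder query name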
