From fca3b33b76ef4b900f08b27b4678a11f15fbc9c8 Mon Sep 17 00:00:00 2001
From: Zac Flamig
Date: Tue, 19 Dec 2017 11:39:55 -0600
Subject: [PATCH] Bug fixes and performance improvements (#5)

* chore(graphene) upgrade to 2.0.1

* chore(travis): Pull in Rudy's PR for codacy coverage

* chore(swagger): Add swagger file
---
 .codacy.yml                                |   5 +
 .travis.yml                                |  35 +--
 README.md                                  |  24 +-
 dev-requirements.txt                       |   5 +-
 openapis/README.md                         |  17 ++
 openapis/swagger.yaml                      | 142 ++++++++++
 peregrine/auth/__init__.py                 |   2 -
 peregrine/resources/submission/__init__.py |   2 +-
 .../resources/submission/graphql/counts.py |   4 +-
 .../resources/submission/graphql/node.py   |  87 +++---
 .../submission/graphql/transaction.py      |  71 +++--
 .../resources/submission/graphql/util.py   |  11 +-
 requirements.txt                           |  10 +-
 tests/conftest.py                          |  47 ++--
 tests/graphql/test_graphql.py              | 251 +++++++++---------
 15 files changed, 441 insertions(+), 272 deletions(-)
 create mode 100644 .codacy.yml
 create mode 100644 openapis/README.md
 create mode 100644 openapis/swagger.yaml

diff --git a/.codacy.yml b/.codacy.yml
new file mode 100644
index 00000000..8ea2ca92
--- /dev/null
+++ b/.codacy.yml
@@ -0,0 +1,5 @@
+---
+coverage:
+  enabled: true
+exclude_paths:
+  - 'tests/**'
diff --git a/.travis.yml b/.travis.yml
index 6a1da3fa..3a9bebed 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,47 +1,26 @@
-sudo: false
 dist: trusty
-group: deprecated-2017Q2
+sudo: false
 language: python
 python:
 - "2.7"
 cache:
-  # pip: true # We have overridden the default install step, caching manually
-  # ref:
-  - directories:
-    - /home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages
-    - $HOME/.pip-cache
+  - pip
   - apt
 addons:
   postgresql: '9.4'
-  # fix for scipy inside SurvivalPy
-  apt:
-    packages:
-      - libatlas-dev
-      - libatlas-base-dev
-      - liblapack-dev
-      - gfortran
 services:
   - elasticsearch

 before_install:
   # bust only the part of the cache that we are frequently changing
-  - pip uninstall --yes psqlgraph gdcdictionary gdcdatamodel || true
-
-  # fix for scipy inside SurvivalPy
-  - wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
-  - bash miniconda.sh -b -p $HOME/miniconda
-  - export PATH="$HOME/miniconda/bin:$PATH"
-  - conda update --yes conda
+  - pip uninstall --yes gdcdictionary gdcdatamodel || true

 # command to install dependencies
 install:
-  # fix for scipy inside SurvivalPy
-  - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy scipy nose future
-
   - pip install -r requirements.txt
   - pip install -r dev-requirements.txt
@@ -53,10 +32,10 @@ before_script:

 # command to run tests
 script:
-- |
-  set -e
-  PYTHONPATH=. py.test -vv tests/system_test.py tests/graphql/test_graphql.py
-  set +e
+  - py.test -vv --cov=peregrine --cov-report xml tests/system_test.py tests/graphql/test_graphql.py
+
+after_script:
+  - python-codacy-coverage -r coverage.xml

 env:
   global:
diff --git a/README.md b/README.md
index 04b52d66..173d737f 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,21 @@
-## Peregrine
-Query interface to get insights into data in Gen3 Commons Edit
-Add topics
+# Peregrine
+
+[![Build Status](https://travis-ci.org/uc-cdis/peregrine.svg?branch=master)](https://travis-ci.org/uc-cdis/peregrine)
+[![Codacy Badge](https://api.codacy.com/project/badge/Grade/f6128183864d4e5da5093eb72a3c9c97)](https://www.codacy.com/app/uc-cdis/peregrine?utm_source=github.com&utm_medium=referral&utm_content=uc-cdis/peregrine&utm_campaign=Badge_Grade)
+[![Codacy Badge](https://api.codacy.com/project/badge/Coverage/f6128183864d4e5da5093eb72a3c9c97)](https://www.codacy.com/app/uc-cdis/peregrine?utm_source=github.com&utm_medium=referral&utm_content=uc-cdis/peregrine&utm_campaign=Badge_Coverage)
+
+Query interface to get insights into data in Gen3 Commons
+
+## Setup
+
+```bash
+# Install requirements.
+pip install -r requirements.txt
+```
+
+## API Documentation
+
+[OpenAPI documentation available here.](http://petstore.swagger.io/?url=https://raw.githubusercontent.com/uc-cdis/peregrine/master/openapis/swagger.yaml)
+
+The YAML file for the OpenAPI documentation is in the `openapis` folder (in
+the root directory); see the README in that folder for more details.
diff --git a/dev-requirements.txt b/dev-requirements.txt
index aa3a767c..8bef7893 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -4,11 +4,12 @@ lockfile==0.10.2
 coverage==3.7.1
 mock==1.0.1
 pytest-flask==0.8.1
+pytest-cov==2.5.1
+codacy-coverage
 moto==0.4.5
 Sphinx==1.3.1
 sphinxcontrib-httpdomain==1.3.0
--e git+https://git@github.com/NCI-GDC/signpost.git@c5d499936943e71eefe2ec4b3d4ced6ac48f35c0#egg=signpost
+-e git+https://git@github.com/NCI-GDC/signpost.git@c8e2aa5ff572c808cba9b522b64f7b497e79c524#egg=signpost
 -e git+https://git@github.com/uc-cdis/cdisutils-test.git@0.0.1#egg=cdisutilstest
 -e git+https://git@github.com/uc-cdis/flask-postgres-session.git@0.1.1#egg=flask_postgres_session
 -e git+https://git@github.com/uc-cdis/sheepdog.git@53cb3a5d69d04a3daf0cdf00957151813fcccac4#egg=sheepdog
--e git+https://git@github.com/uc-cdis/storage-client.git@0.1.1#egg=storageclient
diff --git a/openapis/README.md b/openapis/README.md
new file mode 100644
index 00000000..7667245b
--- /dev/null
+++ b/openapis/README.md
@@ -0,0 +1,17 @@
+# TL;DR
+
+[Swagger](https://swagger.io/getting-started-with-swagger-i-what-is-swagger/) specification of Peregrine's REST API
+
+## Swagger Tools
+
+Use Swagger's editor to update swagger.yaml and swagger.json using one of the following:
+* [online editor](https://editor.swagger.io/)
+* [Docker image](https://hub.docker.com/r/swaggerapi/swagger-editor/) - `docker run -d -p 80:8080 swaggerapi/swagger-editor`
+* or pull the editor code from [GitHub](https://github.com/swagger-api/swagger-editor) and run `npm start` to serve an editor locally.
+
+Publish API documentation with the [swagger-ui](https://github.com/swagger-api/swagger-ui) - also easily launched with Docker: `docker run -p 80:8080 -e SWAGGER_JSON=/foo/swagger.json -v /bar:/foo swaggerapi/swagger-ui`
+
+## OpenAPI Spec
+
+The swagger definition format has been open-sourced as the OpenAPI Specification, administered by the Linux Foundation.
+As of this writing, the latest spec defining the structure and elements of a swagger.yaml file is [version 3](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.0.md).
+
diff --git a/openapis/swagger.yaml b/openapis/swagger.yaml
new file mode 100644
index 00000000..cbf42715
--- /dev/null
+++ b/openapis/swagger.yaml
@@ -0,0 +1,142 @@
+swagger: '2.0'
+info:
+  title: Peregrine OpenAPI Specification
+  version: 0.1.0
+  description: >-
+    GraphQL search microservice for CDIS Gen 3 data commons. Code is available
+    on [GitHub](https://github.com/uc-cdis/peregrine).
+  termsOfService: 'http://cdis.uchicago.edu/terms/'
+  contact:
+    email: cdis@uchicago.edu
+  license:
+    name: Apache 2.0
+    url: 'http://www.apache.org/licenses/LICENSE-2.0.html'
+host: example.domain
+basePath: /
+tags:
+  - name: graphql
+    description: GraphQL Queries
+  - name: system
+    description: System endpoints
+schemes:
+  - https
+paths:
+  /graphql:
+    post:
+      tags:
+        - graphql
+      summary: Perform a GraphQL Query
+      description: >-
+        Perform a GraphQL query over the data commons given a query,
+        variables, and an operation name.
+      consumes:
+        - application/json
+      produces:
+        - application/json
+      parameters:
+        - in: body
+          name: body
+          description: The GraphQL query and parameters
+          required: true
+          schema:
+            $ref: '#/definitions/QueryInputInfo'
+      responses:
+        '200':
+          description: successful operation
+          schema:
+            $ref: '#/definitions/QueryOutputRef'
+        '400':
+          description: Invalid input
+        '403':
+          description: Invalid authorization token
+  /getschema:
+    get:
+      tags:
+        - graphql
+      summary: Returns the data dictionary schema JSON
+      description: >-
+        The data dictionary for the data commons is internally converted from
+        YAML files to JSON. This endpoint returns the JSON schema for the
+        dictionary for use in generating queries.
+      produces:
+        - application/json
+      responses:
+        '200':
+          description: successful operation
+          schema:
+            $ref: '#/definitions/SchemaOutputRef'
+  /_version:
+    get:
+      tags:
+        - system
+      summary: Returns the version of Peregrine
+      produces:
+        - application/json
+      responses:
+        '200':
+          description: successful operation
+          schema:
+            $ref: '#/definitions/VersionOutputRef'
+  /_status:
+    get:
+      tags:
+        - system
+      summary: Reports whether Peregrine is healthy
+      description: >-
+        Health check endpoint that checks whether Peregrine can connect to its
+        backing PostgreSQL datastore.
+      produces:
+        - application/text
+      responses:
+        '200':
+          description: Healthy
+        default:
+          description: Unhealthy
+definitions:
+  QueryInputInfo:
+    type: object
+    properties:
+      query:
+        type: string
+        description: the text of the GraphQL query
+      variables:
+        type: string
+        description: variables for the GraphQL query
+      operationName:
+        type: string
+        description: the name of the operation
+    example:
+      query: '{ project {project_id} }'
+      operationName: null
+      variables: null
+  QueryOutputRef:
+    type: object
+    properties:
+      data:
+        type: object
+        description: the results of the GraphQL query
+  SchemaOutputRef:
+    type: object
+    properties:
+      data:
+        type: object
+        description: the JSON schema for the data dictionary
+  VersionOutputRef:
+    type: object
+    properties:
+      commit:
+        type: string
+        description: the current git commit hash for this version
+      version:
+        type: string
+        description: the current git tag version
+      dictionary:
+        type: object
+        description: the version information for the data dictionary
+        properties:
+          commit:
+            type: string
+            description: the current git commit hash for the dictionary
+          version:
+            type: string
+            description: the current git tag version for the dictionary
diff --git a/peregrine/auth/__init__.py b/peregrine/auth/__init__.py
index 1251c5c1..293d9164 100644
--- a/peregrine/auth/__init__.py
+++ b/peregrine/auth/__init__.py
@@ -69,8 +69,6 @@ def get_user_projects(self, user):
         .all()
     )
     return_res = {}
-    if not results:
-        raise AuthError("No project access")
     for item in results:
         dbgap_no, user_access = item
         return_res[dbgap_no] = user_access.privilege
diff --git a/peregrine/resources/submission/__init__.py b/peregrine/resources/submission/__init__.py
index 32cd3a0e..ad06af14 100644
--- a/peregrine/resources/submission/__init__.py
+++ b/peregrine/resources/submission/__init__.py
@@ -30,7 +30,7 @@ def get_open_project_ids():
         .filter(models.Project.availability_type.astext == "Open")
         .all()
     )
-    return [project['code'] for project in projects]
+    return [project['programs'][0]['name'] + '-' + project['code'] for project in projects]


 def set_read_access_projects():
diff --git a/peregrine/resources/submission/graphql/counts.py b/peregrine/resources/submission/graphql/counts.py
index f2b44dca..ab1f10ce 100644
--- a/peregrine/resources/submission/graphql/counts.py
+++ b/peregrine/resources/submission/graphql/counts.py
@@ -58,14 +58,14 @@ def _queries():
         Query.schema(
             args=ns.NodeSubclassQuery.get_node_query_args(cls),
             name=NodeCountQuery._query_name(cls),
-            type=Int,
+            type=graphene.Int,
         )
         for cls in Node.get_subclasses()
     ] + [
         Query.schema(
             args=transaction.TransactionLogQuery._args(),
             name="_{}_count".format(transaction.TransactionLogQuery.name),
-            type=Int,
+            type=graphene.Int,
         )
     ]
diff --git a/peregrine/resources/submission/graphql/node.py b/peregrine/resources/submission/graphql/node.py
index 82ba545f..0b0c72a4 100644
--- a/peregrine/resources/submission/graphql/node.py
+++ b/peregrine/resources/submission/graphql/node.py
@@ -374,7 +374,7 @@ class Node(graphene.Interface):
     updated_datetime = graphene.String()


-def resolve_node(self, args, info):
+def resolve_node(self, info, **args):
     """The root query for the :class:`Node` node interface.
     :returns:
@@ -432,31 +432,32 @@ def lookup_graphql_type(T):

 def get_node_class_property_args(cls, not_props_io={}):
     args = {
-        name: lookup_graphql_type(types[0])()
+        name: lookup_graphql_type(types[0])
         for name, types in cls.__pg_properties__.iteritems()
     }

     if cls.label == 'project':
-        args['project_id'] = graphene.List(graphene.String())
+        args['project_id'] = graphene.List(graphene.String)

     not_props_io_name = 'NotPropertiesInput_{}'.format(cls.label)
     if not_props_io_name not in not_props_io:
+        args_not = {}
+        args_not.update(get_node_class_property_attrs(cls))
         not_props_io[not_props_io_name] = type(
             not_props_io_name,
             (graphene.InputObjectType,),
-            dict(args),
+            args_not,
         )
-
-    args['not'] = not_props_io[not_props_io_name]
-
+    globals()[not_props_io[not_props_io_name].__name__] = not_props_io[not_props_io_name]
+    args['not'] = graphene.List(__name__ + '.' + not_props_io_name)
     return args


 def get_base_node_args():
     return dict(
         id=graphene.String(),
-        ids=graphene.List(graphene.String()),
+        ids=graphene.List(graphene.String),
         quick_search=graphene.String(),
-        first=graphene.Int(default=10),
+        first=graphene.Int(default_value=10),
         offset=graphene.Int(),
         created_before=graphene.String(),
         created_after=graphene.String(),
@@ -469,7 +470,7 @@ def get_base_node_args():

 def get_node_interface_args():
     return dict(get_base_node_args(), **dict(
-        of_type=graphene.List(graphene.String()),
+        of_type=graphene.List(graphene.String),
         project_id=graphene.String(),
     ))
@@ -480,9 +481,9 @@ def get_node_class_args(cls, _cache={}):

     args = get_base_node_args()
     args.update(dict(
-        with_links=graphene.List(graphene.String()),
-        with_links_any=graphene.List(graphene.String()),
-        without_links=graphene.List(graphene.String()),
+        with_links=graphene.List(graphene.String),
+        with_links_any=graphene.List(graphene.String),
+        without_links=graphene.List(graphene.String),
         with_path_to=graphene.List(WithPathToInput),
         with_path_to_any=graphene.List(WithPathToInput),
         without_path_to=graphene.List(WithPathToInput),
@@ -496,7 +497,11 @@ def get_node_class_args(cls, _cache={}):
     args.update(property_args)

     for key in args:
-        if not isinstance(args[key], graphene.Argument):
+        if isinstance(args[key], graphene.String):
+            args[key] = graphene.Argument(graphene.String, name=key)
+        elif isinstance(args[key], graphene.Int):
+            args[key] = graphene.Argument(graphene.Int, name=key)
+        elif not isinstance(args[key], graphene.Argument):
             args[key] = graphene.Argument(args[key], name=key)

     _cache[cls] = args
@@ -507,16 +512,16 @@ def get_node_class_property_attrs(cls, _cache={}):
     if cls in _cache:
         return _cache[cls]

-    def resolve_type(self, *args):
+    def resolve_type(self, info, *args):
         return self.__class__.__name__

     attrs = {
-        name: lookup_graphql_type(types[0])()
+        name: graphene.Field(lookup_graphql_type(types[0]))
        for name, types in cls.__pg_properties__.iteritems()
     }
     attrs['resolve_type'] = resolve_type

     if cls.label == 'project':
-        def resolve_project_id(self, *args):
+        def resolve_project_id(self, info, *args):
             program = get_authorized_query(md.Program).subq_path(
                 'projects', lambda q: q.ids(self.id)).one()
             return '{}-{}'.format(program.name, self.code)
@@ -546,11 +551,11 @@ def get_node_class_special_attrs(cls):

 def get_node_class_link_attrs(cls):
     attrs = {name: graphene.List(
-        link['type'].label,
+        __name__ + '.' + link['type'].label,
         args=get_node_class_args(link['type']),
     ) for name, link in cls._pg_edges.iteritems()}

-    def resolve__related_cases(self, args, info):
+    def resolve__related_cases(self, info, **args):
         if not CACHE_CASES:
             return []
         # Don't resolve related cases for cases
@@ -578,22 +583,22 @@ def resolve__related_cases(self, args, info):
     for link in cls._pg_edges:
         name = COUNT_NAME.format(link)
         attrs[name] = graphene.Field(
-            graphene.Int(), args=get_node_class_args(cls))
+            graphene.Int, args=get_node_class_args(cls))

     # transaction logs that affected this node
-    def resolve_transaction_logs_count(self, args, info):
+    def resolve_transaction_logs_count(self, info, **args):
         args = dict(args, **{'entities': [self.id]})
-        return transaction.resolve_transaction_log_count(self, args, info)
+        return transaction.resolve_transaction_log_count(self, info, **args)

     attrs['resolve__transaction_logs_count'] = resolve_transaction_logs_count
     attrs['_transaction_logs_count'] = graphene.Field(
-        graphene.Int(),
+        graphene.Int,
         args=transaction.get_transaction_log_args(),
     )

-    def resolve_transaction_logs(self, args, info):
+    def resolve_transaction_logs(self, info, **args):
         args = dict(args, **{'entities': [self.id]})
-        return transaction.resolve_transaction_log(self, args, info)
+        return transaction.resolve_transaction_log(self, info, **args)

     attrs['resolve__transaction_logs'] = resolve_transaction_logs
     attrs['_transaction_logs'] = graphene.List(
@@ -612,7 +617,7 @@ def get_node_class_link_resolver_attrs(cls):
     link_resolver_attrs = {}
     for link_name, link in cls._pg_edges.iteritems():

-        def link_query(self, args, info, cls=cls, link=link):
+        def link_query(self, info, cls=cls, link=link, **args):
             try:
                 target, backref = link['type'], link['backref']
                 # Subquery for neighbor connected to node
@@ -628,9 +633,9 @@ def link_query(self, args, info, cls=cls, link=link):
                 raise

         # Nesting links
-        def resolve_link(self, args, info, cls=cls, link=link):
+        def resolve_link(self, info, cls=cls, link=link, **args):
             try:
-                q = link_query(self, args, info, cls=cls, link=link)
+                q = link_query(self, info, cls=cls, link=link, **args)
                 qcls = __gql_object_classes[link['type'].label]
                 return [qcls(**load_node(n)) for n in q.all()]
             except Exception as e:
@@ -641,9 +646,9 @@ def resolve_link(self, args, info, cls=cls, link=link):
         link_resolver_attrs[lr_name] = resolve_link

         # Link counts
-        def resolve_link_count(self, args, info, cls=cls, link=link):
+        def resolve_link_count(self, info, cls=cls, link=link, **args):
             try:
-                q = link_query(self, args, info, cls=cls, link=link)
+                q = link_query(self, info, cls=cls, link=link, **args)
                 q = q.with_entities(sa.distinct(link['type'].node_id))
                 q = q.limit(None)
                 return q.count()
@@ -655,7 +660,7 @@ def resolve_link_count(self, args, info, cls=cls, link=link):
         link_resolver_attrs[lr_count_name] = resolve_link_count

         # Arbitrary link
-        def resolve_links(self, args, info, cls=cls):
+        def resolve_links(self, info, cls=cls, **args):
             try:
                 edge_out_sq = capp.db.edges().filter(
                     psqlgraph.Edge.src_id == self.id).subquery()
@@ -685,12 +690,20 @@ def resolve_links(self, args, info, cls=cls):

 def create_node_class_gql_object(cls):
+    def _make_inner_meta_type():
+        return type('Meta', (), {'interfaces': (Node, )})
     attrs = {}
     attrs.update(get_node_class_property_attrs(cls))
     attrs.update(get_node_class_link_attrs(cls))
     attrs.update(get_node_class_link_resolver_attrs(cls))
+    attrs['Meta'] = _make_inner_meta_type()
+
+    gql_object = type(cls.label, (graphene.ObjectType, ), attrs)

-    gql_object = type(cls.label, (Node,), attrs)
+    # Add this class to the global namespace so graphene can load it
+    globals()[gql_object.__name__] = gql_object
+
+    # Graphene now requires lambdas of the classes, so return that here
     return gql_object
@@ -700,7 +713,7 @@ def create_root_fields(fields):
         name = cls.label

         # Object resolver
-        def resolver(self, args, info, cls=cls, gql_object=gql_object):
+        def resolver(self, info, cls=cls, gql_object=gql_object, **args):
             q = get_authorized_query(cls)
             q = apply_query_args(q, args, info)
             try:
@@ -720,7 +733,7 @@ def resolver(self, args, info, cls=cls, gql_object=gql_object):
         attrs[res_name] = resolver

         # Count resolver
-        def count_resolver(self, args, info, cls=cls, gql_object=gql_object):
+        def count_resolver(self, info, cls=cls, gql_object=gql_object, **args):
             q = get_authorized_query(cls)
             q = apply_query_args(q, args, info)
             q = q.with_entities(sa.distinct(cls.node_id))
@@ -728,7 +741,7 @@ def count_resolver(self, args, info, cls=cls, gql_object=gql_object):
             return q.count()

         count_field = graphene.Field(
-            graphene.Int(), args=get_node_class_args(cls))
+            graphene.Int, args=get_node_class_args(cls))
         count_name = COUNT_NAME.format(name)
         count_res_name = 'resolve_{}'.format(count_name)
         count_resolver.__name__ = count_res_name
@@ -741,19 +754,17 @@ def count_resolver(self, args, info, cls=cls, gql_object=gql_object):
 WithPathToInput = type('WithPathToInput', (graphene.InputObjectType,), dict(
     id=graphene.String(),
     type=graphene.String(required=True),
-    **{k: v for cls_attrs in [
+    **{k: graphene.Field(v) for cls_attrs in [
         get_node_class_property_args(cls)
         for cls in psqlgraph.Node.get_subclasses()
     ] for k, v in cls_attrs.iteritems()}
 ))

-
 __fields = {
     cls: create_node_class_gql_object(cls)
     for cls in psqlgraph.Node.get_subclasses()
 }

-
 for cls, gql_object in __fields.iteritems():
     __gql_object_classes[cls.label] = gql_object
diff --git a/peregrine/resources/submission/graphql/transaction.py b/peregrine/resources/submission/graphql/transaction.py
index 889c1c3b..62325cc1 100644
--- a/peregrine/resources/submission/graphql/transaction.py
+++ b/peregrine/resources/submission/graphql/transaction.py
@@ -57,20 +57,20 @@ class GenericEntity(graphene.ObjectType):
     id = graphene.String()
     type = graphene.String()

-    def resolve_type(self, args, info):
-        return self.type
+    def resolve_type(self, info, **args):
+        return lambda: self.type


 class TransactionResponseError(graphene.ObjectType):
-    keys = graphene.String().List
+    keys = graphene.List(graphene.String)
     dependents = graphene.List(GenericEntity, description='List of entities that depend on this entity and caused the transaction to fail.')
     message = graphene.String()
     type = graphene.String()

-    def resolve_type(self, args, info):
+    def resolve_type(self, info, **args):
         return self.type

-    def resolve_dependents(self, args, info):
+    def resolve_dependents(self, info, **args):
         try:
             return [
                 GenericEntity(**dependent)
@@ -106,13 +106,13 @@ class TransactionResponseEntity(graphene.ObjectType):
     errors = graphene.List(TransactionResponseError)
     warnings = graphene.String()

-    def resolve_errors(self, args, info):
+    def resolve_errors(self, info, **args):
         return [
             TransactionResponseError(**error)
             for error in self.errors
         ]

-    def resolve_unique_keys(self, args, info):
+    def resolve_unique_keys(self, info, **args):
         """Return a string dump of the unique keys.
        This is a string because we don't have a polymorphic GraphQL
        representation of what might be defined as a unique key and it is
        therefore easier to
@@ -134,10 +134,10 @@ def resolve_unique_keys(self, args, info):
             logger.exception(exception)
             return []

-    def resolve_type(self, args, info):
-        return self.type
+    def resolve_type(self, info, **args):
+        return lambda: self.type

-    def resolve_related_cases(self, args, info):
+    def resolve_related_cases(self, info, **args):
         if CACHE_CASES:
             return [
                 instantiate_safely(TransactionResponseEntityRelatedCases, case)
@@ -163,7 +163,7 @@ class TransactionResponse(graphene.ObjectType):
     entities = graphene.List(TransactionResponseEntity)

     @classmethod
-    def resolve_entities(cls, response, *args):
+    def resolve_entities(cls, response, **args):
         try:
             return [
                 instantiate_safely(TransactionResponseEntity, entity)
@@ -173,7 +173,7 @@ def resolve_entities(cls, response, *args):
             logger.exception(exc)

     @classmethod
-    def resolve_response_json(cls, response, *args):
+    def resolve_response_json(cls, response, **args):
         return json.dumps(response.response_json)
@@ -188,11 +188,11 @@ class TransactionDocument(graphene.ObjectType):
     response = graphene.Field(TransactionResponse)

     @classmethod
-    def resolve_doc_size(cls, document, *args):
+    def resolve_doc_size(cls, document, **args):
         return len(document.doc)

     @classmethod
-    def resolve_response(cls, document, *args):
+    def resolve_response(cls, document, **args):
         try:
             response_json = json.loads(document.response_json)
             return instantiate_safely(TransactionResponse, response_json)
@@ -200,7 +200,7 @@ def resolve_response(cls, document, *args):
             logger.exception(exc)

     @classmethod
-    def resolve_response_json(cls, document, *args):
+    def resolve_response_json(cls, document, **args):
         try:
             return document.response_json
         except Exception as exc:
@@ -232,26 +232,26 @@ class TransactionLog(graphene.ObjectType):
         # f(x) -> x for all others
     }

-    def resolve_project_id(self, args, info):
+    def resolve_project_id(self, info, **args):
         return '{}-{}'.format(self.program, self.project)

-    def resolve_documents(self, args, info):
+    def resolve_documents(self, info, **args):
         return [TransactionDocument(**dict(
             column_dict(r),
             **{'response_json': json.dumps(r.response_json)}
         )) for r in self.documents]

-    def resolve_snapshots(self, args, info):
+    def resolve_snapshots(self, info, **args):
         return [
             TransactionSnapshot(**column_dict(r))
             for r in self.snapshots
         ]

-    def resolve_type(self, args, info):
+    def resolve_type(self, info, **args):
         """Classify the type of transaction by the transaction.role"""
         return self.TYPE_MAP.get(self.role.lower(), self.role.lower())

-    def resolve_related_cases(self, args, info):
+    def resolve_related_cases(self, info, **args):
         if not CACHE_CASES:
             return []
         related_cases = {}
@@ -275,16 +275,16 @@ def get_transaction_log_args():
         id=graphene.ID(),
         type=graphene.String(),
         quick_search=graphene.ID(),
-        project_id=graphene.List(graphene.String()),
+        project_id=graphene.List(graphene.String),
         project=graphene.String(),
         program=graphene.String(),
         order_by_asc=graphene.String(),
         order_by_desc=graphene.String(),
-        related_cases=graphene.List(graphene.String()),
+        related_cases=graphene.List(graphene.String),
         first=graphene.Int(),
         last=graphene.Int(),
         offset=graphene.Int(),
-        entities=graphene.List(graphene.String()),
+        entities=graphene.List(graphene.String),
         is_dry_run=graphene.Boolean(),
         closed=graphene.Boolean(),
         committable=graphene.Boolean(description='(committable: true) means (is_dry_run: true) AND (closed: false) AND (state: "SUCCEEDED") AND (committed_by is None). Note: committed_by is None cannot be represented in GraphQL, hence this argument.'),
@@ -293,12 +293,10 @@ def get_transaction_log_args():
     )


-def resolve_transaction_log_query(self, args, info):
+def resolve_transaction_log_query(self, info, **args):
     sortable = ['id', 'submitter', 'role', 'program', 'project', 'created_datetime', 'canonical_json', 'project_id']

-    if not hasattr(flask.g, 'read_access_projects'):
-        flask.g.read_access_projects = flask.g.user.get_project_ids('read')
     q = flask.current_app.db.nodes(sub.TransactionLog).filter(
         sub.TransactionLog.project_id.in_(flask.g.read_access_projects)
     )
@@ -376,17 +374,18 @@ def resolve_transaction_log_query(self, args, info):
     return q


-def resolve_transaction_log(self, args, info):
-    q = resolve_transaction_log_query(self, args, info)
-    return [TransactionLog(**dict(
-        documents=r.documents,
-        snapshots=r.entities,
-        **column_dict(r)
-    )) for r in q.all()]
+def resolve_transaction_log(self, info, **args):
+    q = resolve_transaction_log_query(self, info, **args)
+    def fast_fix_dict(r):
+        good_dict = r.__dict__.copy()
+        del good_dict['_sa_instance_state']
+        good_dict['snapshots'] = r.entities
+        return good_dict
+    return [TransactionLog(**fast_fix_dict(r)) for r in q.all()]


-def resolve_transaction_log_count(self, args, info):
-    q = resolve_transaction_log_query(self, args, info)
+def resolve_transaction_log_count(self, info, **args):
+    q = resolve_transaction_log_query(self, info, **args)
     q = q.limit(args.get('first', None))
     return q.count()
@@ -397,6 +396,6 @@ def resolve_transaction_log_count(self, args, info):
 )

 TransactionLogCountField = graphene.Field(
-    graphene.Int(),
+    graphene.Int,
     args=get_transaction_log_args(),
 )
diff --git a/peregrine/resources/submission/graphql/util.py b/peregrine/resources/submission/graphql/util.py
index bc7d19b8..5783c46b 100644
--- a/peregrine/resources/submission/graphql/util.py
+++ b/peregrine/resources/submission/graphql/util.py
@@ -90,16 +90,7 @@ def authorization_filter(q):
     ``project_id`` while maintaining filter correctness.
""" - - try: - fg.read_access_projects = fg.get('read_access_projects') or fg.user.get_project_ids('read') - except (AuthError, UserError) as e: - capp.logger.exception(e) - raise GraphQLError(str(e)) - except Exception as e: - capp.logger.exception(e) - raise InternalError() - + cls = q.entity() if cls == psqlgraph.Node or hasattr(cls, 'project_id'): q = q.filter(cls._props['project_id'].astext.in_(fg.read_access_projects)) diff --git a/requirements.txt b/requirements.txt index b26588de..0b854871 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,20 +18,22 @@ boto==2.36.0 elasticsearch==1.2.0 itsdangerous==0.24 requests==2.5.2 -six==1.8.0 +six==1.10.0 urllib3==1.10.0 wsgiref==0.1.2 dicttoxml==1.5.8 sqlalchemy==0.9.9 python-dateutil==2.4.2 -graphql-core==1.1 -graphene==0.10.2 +graphene==2.0.1 +graphql-core==2.0 +graphql-relay==0.4.5 +cyordereddict==1.0.0 Flask-SQLAlchemy-Session==1.1 -e git+https://git@github.com/uc-cdis/indexclient.git@1.0#egg=indexclient -e git+https://git@github.com/uc-cdis/cdis_oauth2client.git@0.1.1#egg=cdis_oauth2client -e git+https://git@github.com/uc-cdis/datadictionary.git@0.1.1#egg=gdcdictionary -e git+https://git@github.com/NCI-GDC/gdcdatamodel.git@568aa07e686fd06d116f67521bb5c8db672cd1cd#egg=gdcdatamodel --e git+https://git@github.com/NCI-GDC/psqlgraph.git@4fbeddeb058ad09741c5d2b9aacbd1c63cfc3dd1#egg=psqlgraph +-e git+https://git@github.com/NCI-GDC/psqlgraph.git@5cddf49dd03a25bd4e553161d7ad7b9a6fe0ac0d#egg=psqlgraph -e git+https://git@github.com/NCI-GDC/cdisutils.git@8a8e599fdab5ade9bd8c586132d974a102e5d72d#egg=cdisutils -e git+https://git@github.com/uc-cdis/userdatamodel.git@1.0.2#egg=userdatamodel -e git+https://git@github.com/uc-cdis/cdis-python-utils.git@0.1.5#egg=cdispyutils diff --git a/tests/conftest.py b/tests/conftest.py index 86aa8e1f..7294c32f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -120,37 +120,30 @@ def run_signpost(port): Signpost({"driver": "inmemory", "layers": ["validator"]}).run( host="localhost", port=port, debug=False) - -@pytest.fixture -def app(tmpdir, request): - - # import sheepdog - # sheepdog_blueprint = sheepdog.blueprint.create_blueprint( - # gdcdictionary.gdcdictionary, gdcdatamodel.models - # ) - +@pytest.fixture(scope="session") +def start_signpost(request): port = 8000 signpost = Process(target=run_signpost, args=[port]) signpost.start() wait_for_signpost_alive(port) - gencode_json = tmpdir.mkdir("slicing").join("test_gencode.json") - gencode_json.write(json.dumps({ - 'a_gene': ['chr1', None, 200], - 'b_gene': ['chr1', 150, 300], - 'c_gene': ['chr1', 200, None], - 'd_gene': ['chr1', None, None], - })) - def teardown(): signpost.terminate() wait_for_signpost_not_alive(port) - _app.config.from_object("peregrine.test_settings") - _app.config['SLICING']['gencode'] = str(gencode_json.realpath()) - request.addfinalizer(teardown) + +@pytest.fixture(scope='session') +def app(request, start_signpost): + + # import sheepdog + # sheepdog_blueprint = sheepdog.blueprint.create_blueprint( + # gdcdictionary.gdcdictionary, gdcdatamodel.models + # ) + + + _app.config.from_object("peregrine.test_settings") app_init(_app) #_app.register_blueprint(sheepdog_blueprint, url_prefix='/v0/submission') @@ -191,8 +184,7 @@ def fin(): @pytest.fixture -def pg_driver(request, client): - pg_driver = PsqlGraphDriver(**pg_config()) +def pg_driver_clean(request, pg_driver): def tearDown(): with pg_driver.engine.begin() as conn: @@ -210,11 +202,20 @@ def tearDown(): conn.execute('delete from transaction_logs') user_teardown() - tearDown() 
+    tearDown()  # clean up potential data left over from the last test
     user_setup()
     request.addfinalizer(tearDown)
     return pg_driver

+@pytest.fixture(scope="session")
+def pg_driver(request):
+    pg_driver = PsqlGraphDriver(**pg_config())
+
+    def closeConnection():
+        pg_driver.engine.dispose()
+
+    request.addfinalizer(closeConnection)
+    return pg_driver

 def user_setup():
     key = Fernet(HMAC_ENCRYPTION_KEY)
diff --git a/tests/graphql/test_graphql.py b/tests/graphql/test_graphql.py
index bf7e8841..a8cf1cdf 100644
--- a/tests/graphql/test_graphql.py
+++ b/tests/graphql/test_graphql.py
@@ -17,7 +17,7 @@
 path = '/v0/submission/graphql'

 def post_example_entities_together(
-        client, pg_driver, submitter, data_fnames=data_fnames):
+        client, pg_driver_clean, submitter, data_fnames=data_fnames):
     path = BLGSP_PATH
     data = []
     for fname in data_fnames:
@@ -26,7 +26,7 @@ def post_example_entities_together(
     return client.post(path, headers=submitter(path, 'post'), data=json.dumps(data))

-def put_example_entities_together(client, pg_driver, submitter):
+def put_example_entities_together(client, pg_driver_clean, submitter):
     path = BLGSP_PATH
     data = []
     for fname in data_fnames:
@@ -62,8 +62,8 @@ def put_cgci_blgsp(client, auth=None, role='admin'):
     return r

-def test_node_subclasses(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_node_subclasses(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     for cls in Node.get_subclasses():
         print cls
         data = json.dumps({
@@ -74,8 +74,8 @@ def test_node_subclasses(client, submitter, pg_driver_clean, cgci_blgsp):
         assert cls.label in r.json['data'], r.data

-def test_alias(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_alias(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     data = json.dumps({
         'query': """query Test { alias1: case { id } }"""
     })
@@ -83,9 +83,8 @@ def test_alias(client, submitter, pg_driver_clean, cgci_blgsp):
     assert 'alias1' in r.json.get('data', {}), r.data

-def test_types(client, submitter, pg_driver, cgci_blgsp):
-    post = post_example_entities_together(client, pg_driver, submitter)
-    print(post.data)
+def test_types(client, submitter, pg_driver_clean, cgci_blgsp):
+    post = post_example_entities_together(client, pg_driver_clean, submitter)
     assert post.status_code == 201
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test {
@@ -94,21 +93,20 @@ def test_types(client, submitter, pg_driver_clean, cgci_blgsp):
         }"""
     }))

-    print r.data
+    print("types data is " + str(r.json))
     assert isinstance(r.json['data']['boolean'][0]['is_ffpe'], bool)
     assert isinstance(r.json['data']['float'][0]['concentration'], float)

-def test_unauthorized_graphql_query(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_unauthorized_graphql_query(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers={}, data=json.dumps({
         'query': """query Test { alias1: case { id } }"""
     }))
     assert r.status_code == 403, r.data

-
-def test_fragment(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_fragment(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """
         query Test {
@@ -123,8 +121,8 @@ def test_fragment(client, submitter, pg_driver_clean, cgci_blgsp):
         assert 'amount' not in case

-def test_viewer(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_viewer(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """
         query Test { viewer { case { id type } } }
@@ -134,8 +132,8 @@ def test_viewer(client, submitter, pg_driver_clean, cgci_blgsp):
         assert 'type' in case

-def test_node_interface(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_node_interface(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test {
         node { id type project_id created_datetime
@@ -149,10 +147,10 @@ def test_node_interface(client, submitter, pg_driver_clean, cgci_blgsp):
         assert 'created_datetime' in node

-def test_quicksearch(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
-    with pg_driver.session_scope():
-        aliquot = pg_driver.nodes(models.Aliquot).first()
+def test_quicksearch(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
+    with pg_driver_clean.session_scope():
+        aliquot = pg_driver_clean.nodes(models.Aliquot).first()
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test {
         aliquot(quick_search: "%s") { id type project_id submitter_id }}
@@ -170,9 +168,9 @@ def test_quicksearch(client, submitter, pg_driver_clean, cgci_blgsp):
     }

-def test_node_interface_project_id(client, submitter, pg_driver):
+def test_node_interface_project_id(client, submitter, pg_driver_clean):
     assert put_cgci_blgsp(client, auth=submitter).status_code == 200
-    post = post_example_entities_together(client, pg_driver, submitter)
+    post = post_example_entities_together(client, pg_driver_clean, submitter)
     assert post.status_code == 201
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test {
@@ -184,8 +182,8 @@ def test_node_interface_project_id(client, submitter, pg_driver_clean):
     assert not r.json['data']['b']

-def test_node_interface_of_type(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_node_interface_of_type(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     data = json.dumps({
         'query': """
         query Test {
@@ -202,8 +200,8 @@ def test_node_interface_of_type(client, submitter, pg_driver_clean, cgci_blgsp):
     assert not {'case'}.symmetric_difference(types)

-def test_arg_props(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_arg_props(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """ query Test { sample (project_id: "CGCI-BLGSP") { project_id }}
         """}))
@@ -221,8 +219,8 @@ def test_arg_props(client, submitter, pg_driver_clean, cgci_blgsp):
     assert not data['sample']

-def test_project_project_id_filter(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_project_project_id_filter(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """
         query Test {
@@ -244,8 +242,8 @@ def test_project_project_id_filter(client, submitter, pg_driver_clean, cgci_blgsp):
     }

-def test_arg_first(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_arg_first(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """ query Test { case (first: 1) { submitter_id }} """}))
     assert r.json == {
@@ -257,8 +255,8 @@ def test_arg_first(client, submitter, pg_driver_clean, cgci_blgsp):
     }, r.data

-def test_arg_offset(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_arg_offset(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """ query Test { case (first: 5) { id }} """}))
     first = {c['id'] for c in r.json['data']['case']}
@@ -271,8 +269,8 @@ def test_arg_offset(client, submitter, pg_driver_clean, cgci_blgsp):

 @pytest.mark.skip(reason='must rewrite query')
-def test_with_path(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_with_path(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     data = json.dumps({
         'query': """
         query Test {
@@ -294,10 +292,10 @@ def test_with_path(client, submitter, pg_driver_clean, cgci_blgsp):
         r.data

-def test_with_path_to_any(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_with_path_to_any(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)

-    with pg_driver.session_scope() as s:
+    with pg_driver_clean.session_scope() as s:
         props = dict(project_id='CGCI-BLGSP', state='validated')
         case1 = models.Case('case1', submitter_id='case1', **props)
         case2 = models.Case('case2', submitter_id='case2', **props)
@@ -349,8 +347,8 @@ def test_with_path_to_any(client, submitter, pg_driver_clean, cgci_blgsp):
     }, r.data

-def test_with_path_to_invalid_type(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_with_path_to_invalid_type(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """
         query Test {
@@ -363,10 +361,10 @@ def test_with_path_to_invalid_type(client, submitter, pg_driver_clean, cgci_blgsp):

 @pytest.mark.skip(reason='test is wrong')
-def test_without_path(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
-    with pg_driver.session_scope():
-        blgsp = pg_driver.nodes(models.Project).props(code='BLGSP').one()
+def test_without_path(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
+    with pg_driver_clean.session_scope():
+        blgsp = pg_driver_clean.nodes(models.Project).props(code='BLGSP').one()
         blgsp.cases += [models.Case('id1', project_id='CGCI-BLGSP')]
     data = json.dumps({
         'query': """
@@ -387,12 +385,12 @@ def test_without_path(client, submitter, pg_driver_clean, cgci_blgsp):

 @pytest.mark.skip(reason='test does not conform to latest dictionary')
 def test_counts_with_path_filter_multiple_paths(
-        client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+        client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)

     # create multiple paths
-    with pg_driver.session_scope() as s:
-        aliquot = pg_driver.nodes(models.Aliquot).first()
+    with pg_driver_clean.session_scope() as s:
+        aliquot = pg_driver_clean.nodes(models.Aliquot).first()
         print(dir(aliquot))
         sample = aliquot.analytes[0].portions[0].samples[0]
         aliquot.samples = [sample]
@@ -409,8 +407,8 @@ def test_counts_with_path_filter_multiple_paths(
     assert data['with'] == 1

-def test_with_path_negative(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_with_path_negative(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """
         query Test {
@@ -424,8 +422,8 @@ def test_with_path_negative(client, submitter, pg_driver_clean, cgci_blgsp):

 @pytest.mark.skip(reason='test does not conform to latest dictionary')
-def test_with_path_multiple(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_with_path_multiple(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """
         query Test {
@@ -440,9 +438,9 @@ def test_with_path_multiple(client, submitter, pg_driver_clean, cgci_blgsp):
         r.data

-def test_order_by_asc_id(client, submitter, pg_driver, cgci_blgsp):
-    utils.reset_transactions(pg_driver)
-    post_example_entities_together(client, pg_driver, submitter)
+def test_order_by_asc_id(client, submitter, pg_driver_clean, cgci_blgsp):
+    utils.reset_transactions(pg_driver_clean)
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test { case (order_by_asc: "id") { id }}"""}))
     print r.data
@@ -451,8 +449,8 @@ def test_order_by_asc_id(client, submitter, pg_driver_clean, cgci_blgsp):
     assert _original == _sorted, r.data

-def test_order_by_desc_id(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_order_by_desc_id(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test { case (order_by_desc: "id") { id }}"""}))
     print r.data
@@ -461,8 +459,8 @@ def test_order_by_desc_id(client, submitter, pg_driver_clean, cgci_blgsp):
     assert _original == _sorted, r.data

-def test_order_by_asc_prop(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_order_by_asc_prop(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test {
         case (order_by_asc: "submitter_id") {
             submitter_id
@@ -474,8 +472,8 @@ def test_order_by_asc_prop(client, submitter, pg_driver_clean, cgci_blgsp):
     assert _original == _sorted, r.data

-def test_order_by_desc_prop(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_order_by_desc_prop(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test {
         case (order_by_desc: "submitter_id") {
             submitter_id
@@ -488,21 +486,21 @@ def test_order_by_desc_prop(client, submitter, pg_driver_clean, cgci_blgsp):

 @pytest.mark.skip(reason='test does not conform to latest dictionary')
-def test_auth_node_subclass(client, submitter, pg_driver, cgci_blgsp):
-    with pg_driver.session_scope():
-        blgsp = pg_driver.nodes(models.Project).props(code='BLGSP').one()
+def test_auth_node_subclass(client, submitter, pg_driver_clean, cgci_blgsp):
+    with pg_driver_clean.session_scope():
+        blgsp = pg_driver_clean.nodes(models.Project).props(code='BLGSP').one()
         blgsp.cases += [models.Case('id1', project_id='CGCI-BLGSP')]
         blgsp.cases += [models.Case('id2', project_id='OTHER-OTHER')]
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test { case { project_id }}"""}))
-    with pg_driver.session_scope():
+    with pg_driver_clean.session_scope():
         assert len(r.json['data']['case']) == 1

-def test_auth_node_subclass_links(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
-    with pg_driver.session_scope() as s:
-        cases = pg_driver.nodes(models.Case).subq_path('samples').all()
+def test_auth_node_subclass_links(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
+    with pg_driver_clean.session_scope() as s:
+        cases = pg_driver_clean.nodes(models.Case).subq_path('samples').all()
         for case in cases:
             for sample in case.samples:
                 sample.project_id = 'OTHER-OTHER'
@@ -511,17 +509,17 @@ def test_auth_node_subclass_links(client, submitter, pg_driver_clean, cgci_blgsp):
         'query': """query Test { case (with_links: ["samples"]) {
             submitter_id samples { id } _samples_count }}"""}))
     print r.data
-    with pg_driver.session_scope():
+    with pg_driver_clean.session_scope():
         for case in r.json['data']['case']:
             assert len(case['samples']) == 0, r.data
             assert case['_samples_count'] == 0, r.data

 @pytest.mark.skip(reason='"clinicals" is not a link name')
-def test_with_links_any(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
-    with pg_driver.session_scope():
-        ncases = pg_driver.nodes(models.Case).count()
+def test_with_links_any(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
+    with pg_driver_clean.session_scope():
+        ncases = pg_driver_clean.nodes(models.Case).count()
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test {
         a: _case_count (with_links_any: [])
@@ -543,39 +541,46 @@ def test_with_links_any(client, submitter, pg_driver_clean, cgci_blgsp):
     }, r.data

-def test_auth_counts(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_auth_counts(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     #: number of nodes to change project_id on, there should
     #: actually only be 1
     n = 1
-    with pg_driver.session_scope() as s:
-        cases = pg_driver.nodes(models.Case).limit(n).all()
+    with pg_driver_clean.session_scope() as s:
+        cases = pg_driver_clean.nodes(models.Case).limit(n).all()
         for case in cases:
             case.project_id = 'OTHER-OTHER'
             s.merge(case)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test { _case_count }"""}))
-    with pg_driver.session_scope():
+    with pg_driver_clean.session_scope():
         assert r.json['data']['_case_count'] == 0

+def test_transaction_logs(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
+    r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
+        'query': """query Test { transaction_log { id, project_id, submitter } }"""}))
+    with pg_driver_clean.session_scope():
+        assert len(r.json['data']['transaction_log']) == 2, r.data

-def test_auth_transaction_logs(client, submitter, pg_driver, cgci_blgsp):
-    utils.reset_transactions(pg_driver)
-    post_example_entities_together(client, pg_driver, submitter)
-    with pg_driver.session_scope() as s:
-        log = pg_driver.nodes(models.submission.TransactionLog).one()
+def test_auth_transaction_logs(client, submitter, pg_driver_clean, cgci_blgsp):
+    utils.reset_transactions(pg_driver_clean)
+    post_example_entities_together(client, pg_driver_clean, submitter)
+    with pg_driver_clean.session_scope() as s:
+        log = pg_driver_clean.nodes(models.submission.TransactionLog).one()
         log.program = 'OTHER'
         s.merge(log)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """query Test { transaction_log { id } }"""}))
-    with pg_driver.session_scope():
+    with pg_driver_clean.session_scope():
         assert len(r.json['data']['transaction_log']) == 0, r.data

-def test_with_path_to(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
-    with pg_driver.session_scope():
-        case_sub_id = pg_driver.nodes(models.Case).path('samples')\
+def test_with_path_to(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
+    with pg_driver_clean.session_scope():
+        case_sub_id = pg_driver_clean.nodes(models.Case).path('samples')\
             .first().submitter_id
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """
@@ -587,10 +592,10 @@ def test_with_path_to(client, submitter, pg_driver_clean, cgci_blgsp):
     assert r.json['data']['aliquot'] == [{'a': 'BLGSP-71-06-00019-01A-11D'}]

-def test_variable(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
-    with pg_driver.session_scope():
-        case = pg_driver.nodes(models.Case).path('samples').one()
+def test_variable(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
+    with pg_driver_clean.session_scope():
+        case = pg_driver_clean.nodes(models.Case).path('samples').one()
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """
         query Test ($caseId: String) {
@@ -614,9 +619,9 @@ def test_variable(client, submitter, pg_driver_clean, cgci_blgsp):
     }

-def test_null_variable(client, submitter, pg_driver, cgci_blgsp):
-    utils.reset_transactions(pg_driver)
-    post_example_entities_together(client, pg_driver, submitter)
+def test_null_variable(client, submitter, pg_driver_clean, cgci_blgsp):
+    utils.reset_transactions(pg_driver_clean)
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """
         query Test ($projectId: [String]) {
@@ -625,8 +630,8 @@ def test_null_variable(client, submitter, pg_driver_clean, cgci_blgsp):
         }
         """,
     }))
-    with pg_driver.session_scope():
-        cases = pg_driver.nodes(models.Case).count()
+    with pg_driver_clean.session_scope():
+        cases = pg_driver_clean.nodes(models.Case).count()

     print r.data
     assert r.json == {
@@ -637,10 +642,10 @@ def test_null_variable(client, submitter, pg_driver_clean, cgci_blgsp):
     }

-def test_property_lists(client, submitter, pg_driver, cgci_blgsp):
-    utils.reset_transactions(pg_driver)
-    post_example_entities_together(client, pg_driver, submitter)
-    with pg_driver.session_scope() as s:
+def test_property_lists(client, submitter, pg_driver_clean, cgci_blgsp):
+    utils.reset_transactions(pg_driver_clean)
+    post_example_entities_together(client, pg_driver_clean, submitter)
+    with pg_driver_clean.session_scope() as s:
         s.merge(
             models.Case('case1', submitter_id='s1', project_id='CGCI-BLGSP')
         )
@@ -673,9 +678,9 @@ def test_property_lists(client, submitter, pg_driver_clean, cgci_blgsp):
     assert response.json == expected_json, response.data

-def test_not_property(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
-    with pg_driver.session_scope() as s:
+def test_not_property(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
+    with pg_driver_clean.session_scope() as s:
         s.merge(
             models.Case('case1', submitter_id='s1', project_id='CGCI-BLGSP')
         )
@@ -698,7 +703,7 @@ def test_not_property(client, submitter, pg_driver_clean, cgci_blgsp):
     }, r.data

-def test_schema(client, submitter, pg_driver):
+def test_schema(client, submitter, pg_driver_clean):
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """
         query IntrospectionQuery {
@@ -785,7 +790,7 @@ def test_schema(client, submitter, pg_driver_clean):

 def test_special_case_project_id(
-        client, submitter, pg_driver, cgci_blgsp, put_tcga_brca):
+        client, submitter, pg_driver_clean, cgci_blgsp, put_tcga_brca):
     put_tcga_brca(client, submitter)
     data = json.dumps({
         'query': """
@@ -817,8 +822,8 @@ def test_special_case_project_id(
     }

-def test_catch_language_error(client, submitter, pg_driver, cgci_blgsp):
-    post_example_entities_together(client, pg_driver, submitter)
+def test_catch_language_error(client, submitter, pg_driver_clean, cgci_blgsp):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
         'query': """{ case-1: case (first: 1) { id }} """}))
     assert r.status_code == 400, r.data
@@ -832,8 +837,8 @@ def test_catch_language_error(client, submitter, pg_driver_clean, cgci_blgsp):

 @pytest.mark.skip(reason='must rewrite query')
 def test_filter_empty_prop_list(
-        client, submitter, pg_driver, cgci_blgsp, monkeypatch):
-    post_example_entities_together(client, pg_driver, submitter)
+        client, submitter, pg_driver_clean, cgci_blgsp, monkeypatch):
+    post_example_entities_together(client, pg_driver_clean, submitter)
     utils.put_entity_from_file(client, 'read_group.json', submitter)
     utils.patch_indexclient(monkeypatch)
     utils.put_entity_from_file(
@@ -858,9 +863,9 @@ def test_filter_empty_prop_list(

 def test_submitted_unaligned_reads_with_path_to_read_group(
-        client, submitter, pg_driver, cgci_blgsp):
+        client, submitter, pg_driver_clean, cgci_blgsp):
     """Regression for incorrect counts"""
-    post_example_entities_together(client, pg_driver, submitter)
+    post_example_entities_together(client, pg_driver_clean, submitter)
     utils.put_entity_from_file(client, 'read_group.json', submitter)

     files = [
@@ -870,8 +875,8 @@ def test_submitted_unaligned_reads_with_path_to_read_group(
         for i in range(3)
     ]

-    with pg_driver.session_scope() as s:
-        rg = pg_driver.nodes(models.ReadGroup).one()
+    with pg_driver_clean.session_scope() as s:
+        rg = pg_driver_clean.nodes(models.ReadGroup).one()
         rg.submitted_unaligned_reads_files = files
         rg = s.merge(rg)
@@ -904,14 +909,14 @@ def test_submitted_unaligned_reads_with_path_to_read_group(
     }

-def test_without_path_order(client, submitter, pg_driver, cgci_blgsp):
+def test_without_path_order(client, submitter, pg_driver_clean, cgci_blgsp):
     """Assert that the ordering is applied after the exception"""
-    put_example_entities_together(client, pg_driver, submitter)
+    put_example_entities_together(client, pg_driver_clean, submitter)
     utils.put_entity_from_file(client, 'case.json', submitter)
     utils.put_entity_from_file(client, 'sample.json', submitter)

-    with pg_driver.session_scope():
-        c = pg_driver.nodes(models.Case).one()
+    with pg_driver_clean.session_scope():
+        c = pg_driver_clean.nodes(models.Case).one()
         c.samples = []

     r = client.post(path, headers=submitter(path, 'post'), data=json.dumps({
@@ -933,9 +938,9 @@ def test_without_path_order(client, submitter, pg_driver_clean, cgci_blgsp):

 def test_read_group_with_path_to_case(
-        client, submitter, pg_driver, cgci_blgsp):
+        client, submitter, pg_driver_clean, cgci_blgsp):
     """Regression for incorrect counts"""
-    put_example_entities_together(client, pg_driver, submitter)
+    put_example_entities_together(client, pg_driver_clean, submitter)
     utils.put_entity_from_file(client, 'read_group.json', submitter)
     data = json.dumps({
         'query': """
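
For reviewers' reference, below is a minimal sketch of exercising the `/graphql` endpoint that the new `openapis/swagger.yaml` documents. It is not part of the patch: the host is the spec's placeholder (`example.domain`), the example query is the one from `QueryInputInfo`, and the `Authorization` header is hypothetical — the exact auth scheme depends on the commons deployment.

```python
# Sketch: POST a GraphQL query to Peregrine's /graphql endpoint, mirroring
# the QueryInputInfo example in openapis/swagger.yaml. Host and token are
# placeholders, not part of this patch.
import json

import requests

payload = {
    "query": "{ project { project_id } }",  # example query from the swagger spec
    "variables": None,
    "operationName": None,
}
response = requests.post(
    "https://example.domain/graphql",  # placeholder host from the spec
    headers={
        "Content-Type": "application/json",
        "Authorization": "Bearer <token>",  # hypothetical; depends on deployment
    },
    data=json.dumps(payload),
)
response.raise_for_status()
# On success the body matches QueryOutputRef: {"data": {...}}
print(response.json()["data"])
```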