Skip to content

Commit

Permalink
Integrated Gemini API with Worker
Browse files Browse the repository at this point in the history
  • Loading branch information
Samuzzal Choudhury committed Mar 5, 2018
1 parent 4972908 commit b2600fb
Show file tree
Hide file tree
Showing 4 changed files with 65 additions and 27 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
FROM registry.centos.org/centos/centos:7

ENV F8A_WORKER_VERSION=6ed8ed3
ENV F8A_WORKER_VERSION=d42f4f4

RUN yum install -y epel-release &&\
yum install -y gcc git python34-pip python34-requests httpd httpd-devel python34-devel &&\
Expand Down
36 changes: 36 additions & 0 deletions openshift/template.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,26 @@ objects:
secretKeyRef:
name: coreapi-postgres
key: username
- name: AWS_SQS_ACCESS_KEY_ID
valueFrom:
secretKeyRef:
name: aws
key: sqs-access-key-id
- name: AWS_SQS_SECRET_ACCESS_KEY
valueFrom:
secretKeyRef:
name: aws
key: sqs-secret-access-key
- name: AWS_S3_ACCESS_KEY_ID
valueFrom:
secretKeyRef:
name: aws
key: s3-access-key-id
- name: AWS_S3_SECRET_ACCESS_KEY
valueFrom:
secretKeyRef:
name: aws
key: s3-secret-access-key
- name: GEMINI_API_SERVICE_PORT
value: "5000"
- name: GEMINI_API_SERVICE_TIMEOUT
Expand All @@ -61,6 +81,15 @@ objects:
value: "4"
- name: CLASS_TYPE
value: "sync"
- name: DEPLOYMENT_PREFIX
valueFrom:
configMapKeyRef:
name: bayesian-config
key: deployment-prefix
- name: WORKER_ADMINISTRATION_REGION
value: "api"
- name: SENTRY_DSN
value: "${SENTRY_DSN}"
image: "${DOCKER_REGISTRY}/${DOCKER_IMAGE}:${IMAGE_TAG}"
name: fabric8-gemini-server
ports:
Expand Down Expand Up @@ -171,3 +200,10 @@ parameters:
required: true
name: GEMINI_API_SERVICE_PORT
value: "5000"

- description: Sentry DSN
displayName: Sentry DSN
required: false
name: SENTRY_DSN
value: ""

2 changes: 2 additions & 0 deletions src/rest_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,12 @@
from flask import Flask, request
from flask_cors import CORS
from utils import DatabaseIngestion, scan_repo, validate_request_data
from f8a_worker.setup_celery import init_selinon

app = Flask(__name__)
CORS(app)

init_selinon()

@app.route('/api/v1/readiness')
def readiness():
Expand Down
52 changes: 26 additions & 26 deletions src/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
from f8a_worker.models import OSIORegisteredRepos
from f8a_worker.setup_celery import init_celery
from selinon import run_flow
import datetime
import requests
Expand Down Expand Up @@ -190,35 +191,34 @@ def get_info(cls, search_key):
return {'is_valid': True, 'data': entry.to_dict()}


def scan_repo(data):
"""Scan function."""
return True
def server_run_flow(flow_name, flow_args):
"""Run a flow.
:param flow_name: name of flow to be run as stated in YAML config file
:param flow_args: arguments for the flow
:return: dispatcher ID handling flow
"""
logger.info('Running flow {}'.format(flow_name))
print('Running flow {}'.format(flow_name))
start = datetime.datetime.now()

class worker_selinon_flow:
"""Worker class to initialize async flows."""
init_celery(result_backend=False)
dispacher_id = run_flow(flow_name, flow_args)

def __init__(self):
"""Initialize class.
Intializes selinon for async functionality.
"""
init_selinon()
elapsed_seconds = (datetime.datetime.now() - start).total_seconds()
print("It took {t} seconds to start {f} flow.".format(
t=elapsed_seconds, f=flow_name))
logger.info("It took {t} seconds to start {f} flow.".format(
t=elapsed_seconds, f=flow_name))
return dispacher_id

def server_run_flow(self, flow_name, flow_args):
"""Run a flow.
:param flow_name: name of flow to be run as stated in YAML config file
:param flow_args: arguments for the flow
:return: dispatcher ID handling flow
"""
current_app.logger.debug('Running flow {}'.format(flow_name))
start = datetime.datetime.now()
def scan_repo(data):
    """Dispatch an OSIO analysis flow for the given repository payload.

    :param data: dict expected to carry 'git_url', 'git_sha' and
                 'email_ids' keys — presumably validated upstream by
                 validate_request_data (confirm with the caller)
    :return: True once the flow has been handed to the dispatcher
    """
    args = {'github_repo': data['git_url'],
            'github_sha': data['git_sha'],
            'email_ids': data['email_ids']}
    d_id = server_run_flow('osioAnalysisFlow', args)
    # Log through the module logger (as server_run_flow does) instead of
    # print, so the dispatcher id reaches the service logs.
    logger.info("DISPATCHER ID = %s", d_id)
    return True

init_celery(result_backend=False)
dispacher_id = run_flow(flow_name, flow_args)

elapsed_seconds = (datetime.datetime.now() - start).total_seconds()
current_app.logger.debug("It took {t} seconds to start {f} flow.".format(
t=elapsed_seconds, f=flow_name))
return dispacher_id

0 comments on commit b2600fb

Please sign in to comment.