Merge pull request #173 from IFB-ElixirFr/multiple-param-support-in-api
Multiple param support in api
thomasrosnet authored Nov 30, 2022
2 parents 98c53de + a51f5b4 commit 13621bf
Showing 3 changed files with 164 additions and 43 deletions.
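This change moves the evaluated resource out of the route and into a "url" query parameter parsed with flask_restx's reqparse. As a rough before/after sketch from the client's point of view (hypothetical DOI; the /api/check prefix is an assumption, made by analogy with the /api/inspect routes used in the notebook below):

import requests

doi = "https://doi.org/10.5061/dryad.example"  # hypothetical target resource

# Before this commit: the target travelled as a <path:url> route segment
# requests.get("http://0.0.0.0:5000/api/check/metrics_all/" + doi)

# After this commit: the target travels as a query parameter
requests.get("http://0.0.0.0:5000/api/check/metrics_all", params={"url": doi})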
74 changes: 61 additions & 13 deletions app.py
@@ -19,7 +19,7 @@
Blueprint,
url_for,
)
from flask_restx import Resource, Api, fields
from flask_restx import Resource, Api, fields, reqparse
from flask_swagger_ui import get_swaggerui_blueprint
from flask_cors import CORS
from flask_socketio import SocketIO
@@ -406,10 +406,27 @@ def statistics():
# return {"data": num ** 2}


reqparse = reqparse.RequestParser()
reqparse.add_argument(
"url",
type=str,
required=True,
location="args",
help="The URL/DOI of the resource to be evaluated",
)


def generate_check_api(metric):
@fc_check_namespace.route("/metric_" + metric.get_principle_tag() + "/<path:url>")
@fc_check_namespace.route("/metric_" + metric.get_principle_tag())
class MetricEval(Resource):
def get(self, url):
@fc_check_namespace.doc(
"Evaluate " + metric.get_principle_tag() + " FAIR metric"
)
@fc_check_namespace.expect(reqparse)
def get(self):

args = reqparse.parse_args()
url = args["url"]

web_res = WebResource(url)
metric.set_web_resource(web_res)
@@ -434,10 +451,24 @@ def get(self, url):
generate_check_api(METRICS_CUSTOM[key])


@fc_check_namespace.route("/metrics_all/<path:url>")
@fc_check_namespace.route("/metrics_all")
# @fc_check_namespace.doc(url_fields)
class MetricEvalAll(Resource):
def get(self, url):
# reqparse = None
# def __init__(self, args):
# self.reqparse = reqparse.RequestParser()
# self.reqparse.add_argument('url', type = str, required = True, location='args', help="Name cannot be blank!")
# # self.reqparse.add_argument('test', type = str, required = True, location='args')
# # super(MetricEvalAll, self).__init__()

@fc_check_namespace.doc("Evaluates all FAIR metrics at once")
@fc_check_namespace.expect(reqparse)
def get(self):
"""All FAIR metrics"""

args = reqparse.parse_args()
url = args["url"]

web_res = WebResource(url)

results = []
@@ -459,11 +490,19 @@ def get(self, url):
return results


@fc_inspect_namespace.route("/get_rdf_metadata/<path:url>")
# fc_check_namespace.add_resource(MetricEvalAll, "/metrics_all")


@fc_inspect_namespace.route("/get_rdf_metadata")
class RetrieveMetadata(Resource):
@fc_inspect_namespace.produces(["application/ld+json"])
def get(self, url):
@fc_inspect_namespace.expect(reqparse)
def get(self):
"""Get RDF metadata in JSON-LD from a web resource"""

args = reqparse.parse_args()
url = args["url"]

web_res = WebResource(url)
data_str = web_res.get_rdf().serialize(format="json-ld")
data_json = json.loads(data_str)
@@ -493,13 +532,16 @@ def get(self, url):


def generate_ask_api(describe):
@fc_inspect_namespace.route(
"/" + describe.__name__ + "/<path:url>", methods=["GET"]
)
@fc_inspect_namespace.route("/" + describe.__name__, methods=["GET"])
@fc_inspect_namespace.route("/" + describe.__name__ + "/", methods=["POST"])
# @api.doc(params={"url": "An URL"})
class Ask(Resource):
def get(self, url):
@fc_inspect_namespace.expect(reqparse)
def get(self):

args = reqparse.parse_args()
url = args["url"]

web_res = WebResource(url)
kg = web_res.get_rdf()
old_kg = copy.deepcopy(kg)
@@ -552,10 +594,16 @@ def post(self):
generate_ask_api(describe)


@fc_inspect_namespace.route("/inspect_ontologies/<path:url>")
@fc_inspect_namespace.route("/inspect_ontologies")
class InspectOntologies(Resource):
def get(self, url):
# @fc_inspect_namespace.doc('Evaluates all FAIR metrics at once')
@fc_inspect_namespace.expect(reqparse)
def get(self):
"""Inspect if RDF properties and classes are found in ontology registries (OLS, LOV, BioPortal)"""

args = reqparse.parse_args()
url = args["url"]

web_res = WebResource(url)
kg = web_res.get_rdf()

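Distilled from the hunks above, the new server-side pattern is a single module-level RequestParser shared by every Resource, declared with expect() so the parameter shows up in the Swagger docs and read with parse_args() inside get(). A minimal standalone sketch of that pattern (the real code reuses the name reqparse for the parser object, shadowing the imported module; this sketch names it parser for clarity):

from flask import Flask
from flask_restx import Api, Resource, reqparse

app = Flask(__name__)
api = Api(app)

# One shared parser: the target resource is read from the query string.
parser = reqparse.RequestParser()
parser.add_argument(
    "url",
    type=str,
    required=True,
    location="args",  # e.g. GET /metrics_all?url=https://doi.org/...
    help="The URL/DOI of the resource to be evaluated",
)


@api.route("/metrics_all")
class MetricEvalAll(Resource):
    @api.expect(parser)  # documents the query parameter in the Swagger UI
    def get(self):
        args = parser.parse_args()
        url = args["url"]
        # a real implementation would build a WebResource(url) and run the metrics
        return {"target": url}


if __name__ == "__main__":
    app.run(port=5000)

Because the value now comes from the query string, URLs and DOIs that contain slashes or their own query strings no longer need to be matched by a <path:url> converter.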
112 changes: 92 additions & 20 deletions notebooks/inspect_describe_api.ipynb
@@ -2,7 +2,8 @@
"cells": [
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 13,
"id": "23cb1a3e",
"metadata": {},
"outputs": [],
"source": [
@@ -13,36 +14,100 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 14,
"id": "0c7f4ba5",
"metadata": {},
"outputs": [],
"source": [
"base_url = \"http://0.0.0.0:5000\"\n",
"\n",
"api_endpoints = [\n",
" \"http://0.0.0.0:5000\" + \"/api/inspect/describe_openaire/\",\n",
" \"http://0.0.0.0:5000\" + \"/api/inspect/describe_opencitation/\",\n",
" \"http://0.0.0.0:5000\" + \"/api/inspect/describe_wikidata/\",\n",
" base_url + \"/api/inspect/describe_openaire?url=\",\n",
" base_url + \"/api/inspect/describe_opencitation?url=\",\n",
" base_url + \"/api/inspect/describe_wikidata?url=\",\n",
"]"
]
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 15,
"id": "b31d89ff",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"REST request to dryad...\n"
"REST request to dryad...\n",
"Parsing result...\n",
"https://datadryad.org/api/v2/datasets?page=2&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=3&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=4&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=5&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=6&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=7&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=8&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=9&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=10&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=11&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=12&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=13&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=14&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=15&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=16&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=17&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=18&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=19&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=20&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=21&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=22&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=23&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=24&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=25&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=26&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=27&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=28&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=29&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=30&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=31&per_page=100\n",
"REST request to dryad...\n",
"https://datadryad.org/api/v2/datasets?page=32&per_page=100\n",
"Retrieved 3100 DOIs\n",
"Stored 'dois_list' (list)\n"
]
}
],
"source": [
"NB_DOIS = 2000\n",
"NB_DOIS = 100\n",
"\n",
"%run get_dois_dryad.ipynb\n",
"filename = \"dryad_\" + str(NB_DOIS) + \".json\"\n",
@@ -58,15 +123,15 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 16,
"id": "2a7779fe",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1000\n"
"3100\n"
]
}
],
@@ -87,15 +152,15 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 17,
"id": "af6cff78",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1000\n"
"100\n"
]
}
],
@@ -105,25 +170,32 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 24,
"id": "7e005ad4",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"100%|██████████| 1000/1000 [47:05<00:00, 2.83s/it] \n"
]
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "51bb5d287d4949acb66e34c0c2a3ffc2",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
" 0%| | 0/100 [00:00<?, ?it/s]"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"results = []\n",
"for url in tqdm(samples_dois_list):\n",
" response = requests.get(\"http://0.0.0.0:5000/api/inspect/get_rdf_metadata/\" + url)\n",
" response = requests.get(base_url + \"/api/inspect/get_rdf_metadata?url=\" + url)\n",
" graph = json.dumps(response.json(), ensure_ascii=False)\n",
" for endpoint in api_endpoints:\n",
" response = requests.post(endpoint, json={\"json-ld\": graph, \"url\": url})\n",
" response = requests.post(endpoint.replace(\"?url=\", \"/\"), json={\"json-ld\": graph, \"url\": url})\n",
" result = {\n",
" \"url\": url,\n",
" \"endpoint\": endpoint.split(\"/\")[-2],\n",
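The notebook changes mirror the API change: metadata is fetched with a GET carrying ?url=, while the describe_* endpoints are still called with a POST and a JSON body on their trailing-slash route, as the last cell shows. A trimmed usage sketch of that flow (hypothetical DOI, assuming a local instance on port 5000):

import json
import requests

base_url = "http://0.0.0.0:5000"
doi = "https://doi.org/10.5061/dryad.example"  # hypothetical DOI

# GET: fetch JSON-LD metadata with the new query-parameter style
resp = requests.get(base_url + "/api/inspect/get_rdf_metadata", params={"url": doi})
graph = json.dumps(resp.json(), ensure_ascii=False)

# POST: send the graph and the target URL as a JSON body to a describe_* route
resp = requests.post(
    base_url + "/api/inspect/describe_wikidata/",
    json={"json-ld": graph, "url": doi},
)
print(resp.status_code)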