Commit 6b48c41e authored by Giannis Tsapelas's avatar Giannis Tsapelas
Browse files

new version

parent fe4c08e6
......@@ -8,7 +8,7 @@ from aggregator.models import *
class DatasetAdmin(admin.ModelAdmin):
    """Admin configuration for Dataset.

    Pins the edit-form field order explicitly; includes the 'hidden' flag
    added in this revision. (The scraped diff showed both the pre- and
    post-change `fields` tuples — only the post-change tuple is kept here.)
    """
    fields = ("metadata_id", "title", "description", "order", "stored_at", "table_name", "private", "spatialEast",
              "spatialSouth", "spatialNorth", "spatialWest", "temporalCoverageBegin", "temporalCoverageEnd",
              "license", "observations", "publisher", "update_frequency", "db_user", "hidden")


admin.site.register(Dataset, DatasetAdmin)
......
......@@ -27,6 +27,11 @@ def get_datatype(datatype):
def parse_field(f):
# print(f.keys())
# if f['id'] == 'https://w3id.org/cybele/component/id/_date':
# name = '_date'
# else:
# name = f['label']
name = f['label']
if 'UoM' in f.keys():
......@@ -132,11 +137,19 @@ def parse_dataset(profile):
class Command(BaseCommand):
help = 'Collects the datasets/tables on Presto and updates all the metadata'
help = 'Collects the datasets/tables on db and updates all the metadata'
def add_arguments(self, parser):
    """Declare the command-line options accepted by this management command.

    Registers a single optional flag:
      --force  boolean (store_true); when present, options['force'] is True,
               which lets handle() bypass the settings.COLLECT_METADATA gate.
    """
    parser.add_argument('--force', action='store_true', help='Force metadata collection')
def handle(self, *args, **options):
# if not settings.COLLECT_METADATA:
# return
if not options['force']:
if not settings.COLLECT_METADATA:
return
# GET THE TABLES FROM HARMONIZATION PROFILES
# """ Old way """
......@@ -170,7 +183,7 @@ class Command(BaseCommand):
execution_outomes = list()
for profile in all_profile_list[:]:
print("Parsing:", profile['title'])
print("\n")
# print("\n")
# pdb.set_trace()
outcome, obj = parse_dataset(profile)
if obj:
......@@ -204,7 +217,9 @@ class Command(BaseCommand):
setattr(dataset, key, obj[key])
dataset.stored_at = 'CYBELE_LXS'
dataset.db_user = 'cybele'
if dataset.db_user is None:
dataset.db_user = 'demo'
dataset.save()
......
# Generated by Django 3.2.9 on 2022-03-03 08:28
from django.db import migrations, models
class Migration(migrations.Migration):
    # Relaxes Dataset.publisher from a required TextField to an optional one
    # (null=True, blank=True), matching the corresponding model change in
    # aggregator/models.py made in this same commit.

    dependencies = [
        ('aggregator', '0054_alter_datasetaccessrequest_creation_date'),
    ]

    operations = [
        migrations.AlterField(
            model_name='dataset',
            name='publisher',
            field=models.TextField(blank=True, null=True),
        ),
    ]
......@@ -61,7 +61,7 @@ class Dataset(Model):
temporalCoverageEnd = DateTimeField(null=True, blank=True)
license = CharField(max_length=200, null=True, blank=True)
observations = CharField(max_length=200, null=True, blank=True)
publisher = TextField()
publisher = TextField(null=True, blank=True)
category = CharField(max_length=200, null=True, blank=True)
image_uri = TextField(default='/static/img/logo.png')
sample_rows = JSONField(null=True, blank=True)
......
......@@ -97,13 +97,8 @@ def process(self, dimension_values='', variable='', only_headers=False, commit=T
print("Fetched Rows")
except Exception as e:
traceback.print_tb(e.__traceback__)
if e.message.find('exceeded') >= 0:
print('MAX MEMORY EXCEEDED')
raise Exception('max_memory_exceeded')
else:
print(e.message)
print('other error')
raise Exception('error')
raise Exception('error')
print("First rows")
print(all_rows[:3])
print(header_sql_types)
......
......@@ -97,27 +97,37 @@ def get_username_from_token(token):
@login_required
@never_cache
def clean(request, pk=None):
user = get_user(request)
user, token = get_user(request)
storage_target = settings.STORAGE_TARGET
# if settings.CHECK_DATASET_ACCESS:
if True:
token = request.user.oidc_profile.access_token
# token = request.user.oidc_profile.access_token
headers = {'AUTHORIZATION': 'Bearer {0}'.format(token)}
r = requests.get(settings.BROKERAGE_ASSETS_ENDPOINT, headers=headers)
metadata_id_list_brokerage = r.json()
try:
print('Accessing Brokerage Engine...')
r = requests.get(
settings.BROKERAGE_ASSETS_ENDPOINT, headers=headers)
print(r)
metadata_id_list_brokerage = r.json()
except:
metadata_id_list_brokerage = []
metadata_id_list_brokerage = [
id.split('/')[-1] for id in metadata_id_list_brokerage]
str(id.split('/')[-1]) for id in metadata_id_list_brokerage]
print(metadata_id_list_brokerage)
username = get_username_from_token(token)
ece_url = settings.ECE_DATASETS_ENDPOINT + username
r = requests.get(ece_url, headers=headers)
# print(r)
metadata_id_list_ece = r.json()
try:
print('Accessing ECE...')
r = requests.get(ece_url, headers=headers)
print(r)
metadata_id_list_ece = r.json()
except:
metadata_id_list_ece = []
print(metadata_id_list_ece)
metadata_id_list_ece = [
id['metadata_id'].split('/')[-1] for id in metadata_id_list_ece]
str(id['metadata_id'].split('/')[-1]).replace('"', "") for id in metadata_id_list_ece]
print(metadata_id_list_ece)
metadata_id_list = metadata_id_list_brokerage + metadata_id_list_ece
......@@ -131,7 +141,8 @@ def clean(request, pk=None):
# dataset_list.order_by('order')
publisher_list = sorted(
set([d.publisher for d in dataset_list if d.publisher is not None]))
organization_list = sorted(set([d.publisher for d in dataset_list]))
organization_list = sorted(
set([d.publisher for d in dataset_list if d.publisher is not None]))
observation_list = sorted(
set([d.observations for d in dataset_list if d.observations is not None]))
license_list = sorted(
......@@ -186,7 +197,7 @@ def save_query(request, pk=None, temp=1):
# print pk, temp
# create or update
if not pk:
user = get_user(request)
user, token = get_user(request)
if user == 'redirect':
return JsonResponse({
'redirect': 'true',
......@@ -400,7 +411,7 @@ def get_config(request):
@never_cache
@login_required
def list_queries(request):
user = get_user(request)
user, token = get_user(request)
if user == 'redirect':
return JsonResponse({
'redirect': 'true',
......@@ -420,7 +431,7 @@ def list_queries(request):
# @login_required
def delete_query(request, pk):
user = get_user(request)
user, token = get_user(request)
if user == 'redirect':
return JsonResponse({
'redirect': 'true',
......@@ -443,6 +454,7 @@ def delete_query(request, pk):
def get_user(request):
user = None
access_token = None
if request.user.is_authenticated:
print("*Authenticated*")
oidc_profile = OpenIdConnectProfile.objects.get(user=request.user)
......@@ -518,7 +530,7 @@ def get_user(request):
print('**********')
print(user)
print('**********')
return user
return user, access_token
def collect_metadata(request):
......@@ -529,7 +541,7 @@ def collect_metadata(request):
@never_cache
# @login_required
def api_list_user_queries(request):
user = get_user(request)
user, token = get_user(request)
if user == 'redirect':
return JsonResponse({
'redirect': 'true',
......@@ -551,7 +563,7 @@ def api_list_user_queries(request):
@never_cache
# @login_required
def get_query_statement(request, query_id):
user = get_user(request)
user, token = get_user(request)
if user == 'redirect':
return JsonResponse({
'redirect': 'true',
......@@ -574,7 +586,7 @@ def get_query_statement(request, query_id):
@never_cache
# @login_required
def get_query_info(request, query_id):
user = get_user(request)
user, token = get_user(request)
if user == 'redirect':
return JsonResponse({
'redirect': 'true',
......@@ -786,7 +798,7 @@ def evaluate_access_to_datasets(user, dataset_list):
@csrf_exempt
def execute_query(request, pk=None):
print('executing')
user = get_user(request)
user, token = get_user(request)
if user == 'redirect':
return JsonResponse({
'redirect': 'true',
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment