Commit ef1a98d5 authored by Giannis Tsapelas's avatar Giannis Tsapelas
Browse files

updates to new version

parent e6c8efcf
......@@ -30,3 +30,9 @@ KEYCLOAK_REALM=
KEYCLOAK_CLIENT_ID=
KEYCLOAK_CLIENT_SECRET=
# Metadata
METADATA_API_ENDPOINT=
METADATA_SPARQL_ENDPOINT=
METADATA_TESTBED=
......@@ -30,6 +30,12 @@ KEYCLOAK_REALM=
KEYCLOAK_CLIENT_ID=
KEYCLOAK_CLIENT_SECRET=
# Metadata
METADATA_API_ENDPOINT=
METADATA_SPARQL_ENDPOINT=
METADATA_TESTBED=
# Gunicorn
# ------------------------------------------------------------------------------
WEB_CONCURRENCY=3
......@@ -6,8 +6,8 @@ from aggregator.models import *
class DatasetAdmin(admin.ModelAdmin):
fields = ("title", "description", "order", "table_name", "temporalCoverageBegin", "temporalCoverageEnd", "observations", "publisher", "update_frequency", "db_user")
pass
# fields = ("title", "description", "order", "table_name", "temporalCoverageBegin", "temporalCoverageEnd", "observations", "publisher", "update_frequency", "db_user")
admin.site.register(Dataset, DatasetAdmin)
......
This diff is collapsed.
# Generated by Django 3.1.5 on 2021-04-22 10:39
import datetime
from django.conf import settings
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add ``Dataset.metadata_id`` and relax many ``Dataset`` fields.

    Auto-generated by Django 3.1.5 (``makemigrations``, 2021-04-22).
    The bulk of the operations turn previously required fields into
    ``blank=True, null=True`` so the admin form no longer demands them.
    Do not hand-edit the operation order — it is recorded schema history.
    """

    dependencies = [
        # Needed because Dataset.owner references the (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('aggregator', '0049_auto_20200915_1527'),
    ]

    operations = [
        # New column: identifier of the dataset in the external metadata registry.
        migrations.AddField(
            model_name='dataset',
            name='metadata_id',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='category',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='db_user',
            field=models.CharField(blank=True, max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='last_updated',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='license',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='number_of_rows',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='observations',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='organization',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='datasets', to='aggregator.organization'),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='owner',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='dataset_owner', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='references',
            field=django.contrib.postgres.fields.ArrayField(base_field=models.TextField(), blank=True, null=True, size=None),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='sample_rows',
            field=models.JSONField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='size_in_gb',
            field=models.FloatField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='spatialEast',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='spatialNorth',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='spatialSouth',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='spatialWest',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='stored_at',
            field=models.CharField(choices=[('LOCAL_POSTGRES', 'Local PostgreSQL instance'), ('UBITECH_POSTGRES', "UBITECH's PostgreSQL instance at http://212.101.173.21"), ('UBITECH_PRESTO', "UBITECH's PRESTO instance"), ('CYBELE_LXS', 'CYBELE_LXS'), ('UBITECH_SOLR', 'Solr instance at http://212.101.173.50:8983')], default='LOCAL_POSTGRES', max_length=32),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='temporalCoverageBegin',
            field=models.DateTimeField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='dataset',
            name='temporalCoverageEnd',
            field=models.DateTimeField(blank=True, null=True),
        ),
        # NOTE(review): this default is a timestamp frozen at makemigrations
        # time, which happens when the model declares default=datetime.now()
        # (called) instead of default=datetime.now (referenced). Harmless in
        # the recorded migration, but the model field should use
        # auto_now_add=True or default=timezone.now — fix in models.py, not here.
        migrations.AlterField(
            model_name='datasetaccessrequest',
            name='creation_date',
            field=models.DateTimeField(default=datetime.datetime(2021, 4, 22, 10, 39, 23, 756118)),
        ),
    ]
......@@ -35,51 +35,55 @@ class Organization(Model):
class Vessel_Identifier(Model):
dataset = ForeignKey('Dataset', related_name='vessel_identifiers', on_delete=CASCADE)
dataset = ForeignKey(
'Dataset', related_name='vessel_identifiers', on_delete=CASCADE)
column_name = CharField(max_length=200)
values_list = JSONField()
class Dataset(Model):
metadata_id = CharField(max_length=200, null=True, blank=True)
title = TextField()
source = TextField()
description = TextField()
order = IntegerField(default=999)
references = ArrayField(TextField(), null=True)
stored_at = CharField(max_length=32, choices=DATASET_STORAGES, default='LOCAL_POSTGRES')
references = ArrayField(TextField(), null=True, blank=True)
stored_at = CharField(
max_length=32, choices=DATASET_STORAGES, default='LOCAL_POSTGRES')
table_name = CharField(max_length=200)
private = BooleanField(default=False)
spatialEast = CharField(max_length=200, null=True)
spatialSouth = CharField(max_length=200, null=True)
spatialNorth = CharField(max_length=200, null=True)
spatialWest = CharField(max_length=200, null=True)
temporalCoverageBegin = DateTimeField(null=True)
temporalCoverageEnd = DateTimeField(null=True)
license = CharField(max_length=200, null=True)
observations = CharField(max_length=200, null=True)
spatialEast = CharField(max_length=200, null=True, blank=True)
spatialSouth = CharField(max_length=200, null=True, blank=True)
spatialNorth = CharField(max_length=200, null=True, blank=True)
spatialWest = CharField(max_length=200, null=True, blank=True)
temporalCoverageBegin = DateTimeField(null=True, blank=True)
temporalCoverageEnd = DateTimeField(null=True, blank=True)
license = CharField(max_length=200, null=True, blank=True)
observations = CharField(max_length=200, null=True, blank=True)
publisher = TextField()
category = CharField(max_length=200, null=True)
category = CharField(max_length=200, null=True, blank=True)
image_uri = TextField(default='/static/img/logo.png')
sample_rows = JSONField(null=True)
number_of_rows = CharField(max_length=200, null=True)
size_in_gb = FloatField(null=True)
sample_rows = JSONField(null=True, blank=True)
number_of_rows = CharField(max_length=200, null=True, blank=True)
size_in_gb = FloatField(null=True, blank=True)
update_frequency = CharField(max_length=200, default='-')
last_updated = DateTimeField(null=True)
owner = ForeignKey(User, related_name='dataset_owner', null=True, on_delete=CASCADE, default=None)
last_updated = DateTimeField(null=True, blank=True)
owner = ForeignKey(User, related_name='dataset_owner',
null=True, blank=True, on_delete=CASCADE, default=None)
metadata = JSONField(default=dict)
hascoverage_img = BooleanField(default=False)
arguments = JSONField(default=dict)
joined_with_dataset = models.ManyToManyField("self",through = 'JoinOfDatasets',
symmetrical=False,
related_name='joined_to')
joined_with_dataset = models.ManyToManyField("self", through='JoinOfDatasets',
symmetrical=False,
related_name='joined_to')
organization = ForeignKey(Organization, related_name='datasets', null=True, default=None, on_delete=CASCADE)
db_user = CharField(max_length=50, null=True)
organization = ForeignKey(Organization, related_name='datasets',
null=True, blank=True, default=None, on_delete=CASCADE)
db_user = CharField(max_length=50, null=True, blank=True)
def __str__(self):
return self.title
class Meta:
ordering = ['-id']
......@@ -113,10 +117,12 @@ class Dataset(Model):
try:
temporalCoverageBegin = self.temporalCoverageBegin
# temporalCoverageBegin_timestamp = long(time.mktime(temporalCoverageBegin.timetuple())) * 1000
td = temporalCoverageBegin.replace(tzinfo=None) - datetime(1970, 1, 1)
td = temporalCoverageBegin.replace(
tzinfo=None) - datetime(1970, 1, 1)
# print(self.pk, temporalCoverageBegin)
# print(self.pk, td)
temporalCoverageBegin_timestamp = (td.seconds + (td.days * 24 * 3600)) * 1000
temporalCoverageBegin_timestamp = (
td.seconds + (td.days * 24 * 3600)) * 1000
# print(self.pk, temporalCoverageBegin_timestamp)
return temporalCoverageBegin_timestamp
except:
......@@ -128,8 +134,10 @@ class Dataset(Model):
try:
temporalCoverageEnd = self.temporalCoverageEnd
# temporalCoverageBegin_timestamp = long(time.mktime(temporalCoverageBegin.timetuple())) * 1000
td = temporalCoverageEnd.replace(tzinfo=None) - datetime(1970, 1, 1)
temporalCoverageEnd_timestamp = (td.seconds + (td.days * 24 * 3600)) * 1000
td = temporalCoverageEnd.replace(
tzinfo=None) - datetime(1970, 1, 1)
temporalCoverageEnd_timestamp = (
td.seconds + (td.days * 24 * 3600)) * 1000
return temporalCoverageEnd_timestamp
except:
return ''
......@@ -151,7 +159,8 @@ class DatasetAccess(Model):
class DatasetAccessRequest(Model):
user = ForeignKey(User, on_delete=CASCADE)
resource = ForeignKey(Dataset, on_delete=CASCADE, related_name='resource')
status = CharField(max_length=20, choices=ACCESS_REQUEST_STATUS_CHOICES, default='open')
status = CharField(
max_length=20, choices=ACCESS_REQUEST_STATUS_CHOICES, default='open')
creation_date = DateTimeField(default=datetime.now())
response_date = DateTimeField(null=True)
......@@ -160,10 +169,11 @@ class DatasetAccessRequest(Model):
return 'dataset'
class JoinOfDatasets(Model):
dataset_first = models.ForeignKey(Dataset, on_delete=models.CASCADE, related_name='first')
dataset_second = models.ForeignKey(Dataset, on_delete=models.CASCADE, related_name='second')
dataset_first = models.ForeignKey(
Dataset, on_delete=models.CASCADE, related_name='first')
dataset_second = models.ForeignKey(
Dataset, on_delete=models.CASCADE, related_name='second')
view_name = models.CharField(max_length=100)
......@@ -181,18 +191,25 @@ class BaseVariable(Model):
class Dimension(BaseVariable):
variable = ForeignKey('Variable', related_name='dimensions', on_delete=CASCADE)
variable = ForeignKey(
'Variable', related_name='dimensions', on_delete=CASCADE)
data_column_name = CharField(max_length=255)
min = DecimalField(blank=True, null=True, default=None, max_digits=100, decimal_places=50)
max = DecimalField(blank=True, null=True, default=None, max_digits=100, decimal_places=50)
step = DecimalField(blank=True, null=True, default=None, max_digits=100, decimal_places=50)
min = DecimalField(blank=True, null=True, default=None,
max_digits=100, decimal_places=50)
max = DecimalField(blank=True, null=True, default=None,
max_digits=100, decimal_places=50)
step = DecimalField(blank=True, null=True, default=None,
max_digits=100, decimal_places=50)
axis = TextField(blank=True, null=True, default=None)
non_filterable = BooleanField(default=False)
class Meta:
ordering = ['pk']
def __str__(self):
return self.variable.dataset.title + ' = ' + self.variable.title + ' = ' + self.title
def __unicode__(self):
return u'%s' % self.title
......@@ -239,7 +256,8 @@ class Dimension(BaseVariable):
def get_values_from_db(self):
q_col_values = 'SELECT DISTINCT(%s) FROM %s ORDER BY %s' % \
(self.data_column_name, self.variable.data_table_name, self.data_column_name)
(self.data_column_name,
self.variable.data_table_name, self.data_column_name)
cursor = connection.cursor()
cursor.execute(q_col_values)
......@@ -307,7 +325,8 @@ class Dimension(BaseVariable):
class Variable(BaseVariable):
dataset = ForeignKey('Dataset', related_name='variables', on_delete=CASCADE)
dataset = ForeignKey(
'Dataset', related_name='variables', on_delete=CASCADE)
scale_factor = FloatField(default=1)
add_offset = FloatField(default=0)
......@@ -315,11 +334,15 @@ class Variable(BaseVariable):
type_of_analysis = TextField(blank=True, null=True, default=None)
# {min, 10%, 25%, 50%, 75%, 90%, max}
distribution = ArrayField(FloatField(), size=7, blank=True, null=True, default=None)
distribution = ArrayField(FloatField(), size=7,
blank=True, null=True, default=None)
class Meta:
ordering = ['title']
def __str__(self):
return self.dataset.title + ' = ' + self.title
def __unicode__(self):
return u'%s' % self.title
......@@ -345,9 +368,9 @@ class Variable(BaseVariable):
if self.dataset.stored_at == 'UBITECH_POSTGRES':
return self.dataset.table_name
elif self.dataset.stored_at == 'UBITECH_PRESTO':
return self.dataset.table_name
return self.dataset.table_name
elif self.dataset.stored_at == 'CYBELE_LXS':
return self.dataset.table_name
return self.dataset.table_name
else:
return self.safe_name + ('_%d' % self.pk)
......@@ -374,13 +397,15 @@ class Variable(BaseVariable):
def delete_data_table(self, cursor):
# delete indeces
for d in self.dimensions.all():
cursor.execute('DROP INDEX IF EXISTS idx_%d_%s;' % (self.pk, d.data_column_name))
cursor.execute('DROP INDEX IF EXISTS idx_%d_%s;' %
(self.pk, d.data_column_name))
# delete table
cursor.execute('DROP TABLE IF EXISTS %s;' % self.data_table_name)
def count_values(self, cursor):
cursor.execute("SELECT reltuples::BIGINT AS estimate FROM pg_class WHERE relname='%s'" % self.data_table_name)
cursor.execute(
"SELECT reltuples::BIGINT AS estimate FROM pg_class WHERE relname='%s'" % self.data_table_name)
return cursor.fetchone()[0]
def update_distribution(self, cursor):
......
# pull official base image
FROM python:3.7-alpine
FROM python:3.8-alpine
# set work directory
WORKDIR /usr/src/cybele_advanced_query_builder
......@@ -21,12 +21,13 @@ RUN apk add --update git
COPY ./requirements/requirements_base.txt /usr/src/cybele_advanced_query_builder/requirements_base.txt
RUN \
apk add --no-cache postgresql-libs && \
apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev && \
apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev python3-dev && \
apk add --no-cache libressl-dev musl-dev libffi-dev openssl-dev cargo && \
python3 -m pip install -r requirements_base.txt && \
apk --purge del .build-deps
COPY ./requirements/requirements_dev.txt /usr/src/cybele_advanced_query_builder/requirements_extra.txt
RUN python3 -m pip install -r requirements_extra.txt
RUN pip3 install -r requirements_extra.txt
# create a "django" user
......
......@@ -14,17 +14,19 @@ done
echo "PostgreSQL started"
echo "Copying new Keycloak client and views"
cp -rf client.py /usr/local/lib/python3.7/site-packages/keycloak/
cp -rf views.py /usr/local/lib/python3.7/site-packages/django_keycloak/
cp -rf client.py /usr/local/lib/python3.8/site-packages/keycloak/
cp -rf views.py /usr/local/lib/python3.8/site-packages/django_keycloak/
echo "Migrating..."
python manage.py migrate --noinput
echo "Importing initial data"
python manage.py loaddata datasets.json
# echo "Importing initial data"
# python manage.py loaddata datasets.json
echo "Setting up Keycloak"
python manage.py setup_keycloak
echo "Setting up admin"
python manage.py initadmin
......
# pull official base image
FROM python:3.7-alpine
FROM python:3.8-alpine
# set work directory
WORKDIR /usr/src/cybele_advanced_query_builder
......@@ -21,12 +21,13 @@ RUN apk add --update git
COPY ./requirements/requirements_base.txt /usr/src/cybele_advanced_query_builder/requirements_base.txt
RUN \
apk add --no-cache postgresql-libs && \
apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev && \
apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev python3-dev && \
apk add --no-cache libressl-dev musl-dev libffi-dev openssl-dev cargo && \
python3 -m pip install -r requirements_base.txt && \
apk --purge del .build-deps
COPY ./requirements/requirements_prod.txt /usr/src/cybele_advanced_query_builder/requirements_extra.txt
RUN python3 -m pip install -r requirements_extra.txt
RUN pip3 install -r requirements_extra.txt
# create a "django" user
......
......@@ -42,17 +42,19 @@ done
exec "$@"
echo "Copying new Keycloak client and views"
cp -rf client.py /usr/local/lib/python3.7/site-packages/keycloak/
cp -rf views.py /usr/local/lib/python3.7/site-packages/django_keycloak/
cp -rf client.py /usr/local/lib/python3.8/site-packages/keycloak/
cp -rf views.py /usr/local/lib/python3.8/site-packages/django_keycloak/
echo "Migrating..."
python manage.py migrate --noinput
echo "Importing initial data"
python manage.py loaddata datasets.json
# echo "Importing initial data"
# python manage.py loaddata datasets.json
echo "Setting up Keycloak"
python manage.py setup_keycloak
echo "Setting up admin"
python manage.py initadmin
echo "Starting the server..."
......
......@@ -2,7 +2,8 @@ import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import uuid
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
BASE_DIR = os.path.dirname(os.path.dirname(
os.path.dirname(os.path.abspath(__file__))))
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get("SECRET_KEY")
......@@ -34,6 +35,7 @@ INSTALLED_APPS = [
'allauth',
'allauth.account',
'allauth.socialaccount',
# 'allauth.socialaccount.providers.keycloak',
'django_keycloak.apps.KeycloakAppConfig',
# apps
......@@ -63,7 +65,6 @@ DATABASES = {
}
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
"django.contrib.auth.backends.ModelBackend",
......@@ -85,20 +86,24 @@ MIDDLEWARE = [
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# removed to allow embedding in the CYBELE Dashboard iframes
#'django.middleware.clickjacking.XFrameOptionsMiddleware',
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
'django_keycloak.middleware.BaseKeycloakMiddleware'
# 'django_keycloak.middleware.BaseKeycloakMiddleware'
'django_keycloak.middleware.KeycloakStatelessBearerAuthenticationMiddleware'
]
LOGIN_EXEMPT_URLS = (
# r'^$',
r'^terms/$',
r'^$',
r'^terms/$',
r'^about$',
r'^register$',
r'^accounts/',
r'^api/',
r'^',
r'^/',
)
ROOT_URLCONF = 'cybele_advanced_query_builder.urls'
......@@ -106,8 +111,7 @@ ROOT_URLCONF = 'cybele_advanced_query_builder.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, 'templates', 'allauth')]
,
'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, 'templates', 'allauth')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
......@@ -131,7 +135,8 @@ SETTINGS_EXPORT = [
KEYCLOAK_OIDC_PROFILE_MODEL = 'django_keycloak.OpenIdConnectProfile'
LOGIN_URL = 'keycloak_login'
KEYCLOAK_PERMISSIONS_METHOD = 'resource'
# KEYCLOAK_BEARER_AUTHENTICATION_EXEMPT_PATHS=(r'api/',)
KEYCLOAK_BEARER_AUTHENTICATION_EXEMPT_PATHS = (r'api/', r'/', r'')
# AUTH_PASSWORD_VALIDATORS = [
# # {
......@@ -153,6 +158,11 @@ KEYCLOAK_PERMISSIONS_METHOD = 'resource'
# ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
# Metadata service configuration, supplied via environment variables.
# BUG FIX: the original lines ended with trailing commas, which made each
# setting a 1-tuple, e.g. ('http://…',) instead of 'http://…'. Any consumer
# doing string operations (urljoin, requests.get, f-strings) on these
# settings would then fail or misbehave. The commas are removed so each
# setting is the plain string (or None when the variable is unset).
METADATA_API_ENDPOINT = os.environ.get("METADATA_API_ENDPOINT")
METADATA_SPARQL_ENDPOINT = os.environ.get("METADATA_SPARQL_ENDPOINT")
METADATA_TESTBED = os.environ.get("METADATA_TESTBED")
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
......@@ -167,3 +177,11 @@ STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
# SOCIALACCOUNT_PROVIDERS = {
# 'keycloak': {
# 'KEYCLOAK_URL': 'https://cybele-uaa.org/auth',
# 'KEYCLOAK_REALM': 'master'
# }
# }
......@@ -18,14 +18,14 @@ from django.contrib import admin
urlpatterns = [
# admin
url(r'^admin/', admin.site.urls),
url(r'^aqb/admin/', admin.site.urls),
# authentication
url(r'^accounts/', include('allauth.urls')),
url(r'^aqb/accounts/', include('allauth.urls')),
# keycloak
url(r'^keycloak/', include('django_keycloak.urls')),
url(r'^aqb/keycloak/', include('django_keycloak.urls')),
# query designer
url(r'^', include('query_designer.urls')),
url(r'^aqb/', include('query_designer.urls')),
]