Compare commits
30 Commits
Author | SHA1 | Date |
---|---|---|
Sam | db9fc35715 | |
Sam | 7f4efbfe50 | |
Sam | bec1b2c796 | |
Sam | 5943fd5dd8 | |
Sam | ef28791bd3 | |
Sam | 54a32019c6 | |
Sam | 99ee771417 | |
Sam | 537cfdee1b | |
Sam | eb3d17884a | |
Sam | 5930fe90a1 | |
Sam | f279f5ade4 | |
Sam | 7253c40da1 | |
Sam | ab37400522 | |
Sam | a0a3f38ebd | |
Sam | e9cd27d5c4 | |
Sam | ce58616cbf | |
Sam | 886abd21c8 | |
Sam | 2d21aa98fc | |
Sam | 0e6e19d483 | |
Sam | 2d56c61856 | |
Sam | 8e7d963e26 | |
Sam | 6a882b6377 | |
Sam | 96f119efe5 | |
Sam | 13055a41fb | |
Sam | 4179b09e61 | |
Sam | c1805b2926 | |
Sam | 2233e8b865 | |
Sam | 9e8e3f27a5 | |
Sam | cb0fc7a843 | |
Sam | c31c1f5f06 |
@ -5,3 +5,5 @@
/data
backend/api_logs.txt
*__pycache__*
.env
poetry.lock

@ -1,3 +1,3 @@
# My personal website ([https://baseddata.io](https://baseddata.io))
# My personal website ([https://bitlab21.com](https://bitlab21.com))

This site is made using [Hugo](https://gohugo.io/), a static website generator. The backend (which powers the API that serves data for the charts) is built in Python and served using Gunicorn.

@ -0,0 +1,215 @@
|
|||
def parse_int(args):
|
||||
try:
|
||||
return int( args )
|
||||
except ValueError:
|
||||
print(f"{args} cannot be cast to int")
|
||||
raise
|
||||
|
||||
def mangrove_by_country_latest():
|
||||
return """
|
||||
select * from models_final.final__protected_mangroves_summary_stats_by_country_agg
|
||||
where year = '2020'
|
||||
order by cumulative_pixels_diff desc
|
||||
"""
|
||||
|
||||
def bitcoin_business_growth_timeseries(args):
|
||||
days_ago = parse_int(args["days_ago"])
|
||||
country_name = args["country_name"]
|
||||
return f"""
|
||||
select * from models_final.final__bitcoin_business_growth_by_country
|
||||
where days_ago <= {days_ago} and country_name = '{country_name}'
|
||||
order by date
|
||||
"""
|
||||
|
||||
def bitcoin_business_growth_percent_diff_days_ago(args):
|
||||
days_ago = parse_int(args["days_ago"])
|
||||
return f"""
|
||||
with
|
||||
filtered_data as (
|
||||
select country_name, date, days_ago, cumulative_value
|
||||
from models_final.final__bitcoin_business_growth_by_country
|
||||
where days_ago <= {days_ago}
|
||||
order by country_name, days_ago desc
|
||||
),
|
||||
first_and_last_values as (
|
||||
select
|
||||
country_name,
|
||||
date,
|
||||
days_ago,
|
||||
cumulative_value,
|
||||
first_value(cumulative_value) over (
|
||||
partition by country_name order by days_ago desc
|
||||
) as first_value,
|
||||
first_value(date) over (
|
||||
partition by country_name order by days_ago desc
|
||||
) as first_date,
|
||||
first_value(cumulative_value) over (
|
||||
partition by country_name order by days_ago
|
||||
) as last_value,
|
||||
first_value(date) over (
|
||||
partition by country_name order by days_ago
|
||||
) as last_date
|
||||
from filtered_data
|
||||
),
|
||||
diff as (
|
||||
select
|
||||
country_name,
|
||||
date,
|
||||
first_date,
|
||||
last_date,
|
||||
days_ago,
|
||||
cumulative_value,
|
||||
first_value,
|
||||
last_value,
|
||||
last_value - first_value as difference,
|
||||
round(
|
||||
100 * safe_divide((last_value - first_value), first_value), 2
|
||||
) as percent_difference
|
||||
from first_and_last_values
|
||||
)
|
||||
select *
|
||||
from diff
|
||||
where days_ago = 1
|
||||
order by difference desc
|
||||
"""
|
||||
# def bitcoin_business_growth_timeseries(query):
|
||||
# pipeline = [
|
||||
# {
|
||||
# "$match": {
|
||||
# "days_ago": {"$lte": int(query["days_ago"])},
|
||||
# "country_name": query["country_name"],
|
||||
# }
|
||||
# },
|
||||
# {
|
||||
# "$project": {
|
||||
# "country_name": "$country_name",
|
||||
# "date": "$date",
|
||||
# "cumulative_value": "$cumulative_value",
|
||||
# }
|
||||
# },
|
||||
# {"$sort": {"country_name": 1, "days_ago": 1}},
|
||||
# ]
|
||||
# return pipeline
|
||||
# def mangrove_by_country_latest():
|
||||
# pipeline = [
|
||||
# {
|
||||
# "$match": {"year": "2020"},
|
||||
# },
|
||||
# ]
|
||||
# return pipeline
|
||||
#
|
||||
#
|
||||
# def mangrove_by_country_agg(query):
|
||||
# pipeline = [
|
||||
# {"$match": {"country_with_parent": query["country_with_parent"]}},
|
||||
# {
|
||||
# "$group": {
|
||||
# "_id": {"country_with_parent": "$country_with_parent", "year": "$year"},
|
||||
# "total_pixels": {"$sum": "$total_n_pixels"},
|
||||
# }
|
||||
# },
|
||||
# {
|
||||
# "$project": {
|
||||
# "_id": 0,
|
||||
# "country_with_parent": "$_id.country_with_parent",
|
||||
# "year": "$_id.year",
|
||||
# "total_pixels": 1,
|
||||
# }
|
||||
# },
|
||||
# {"$sort": {"year": 1}},
|
||||
# ]
|
||||
# return pipeline
|
||||
#
|
||||
#
|
||||
# def bitcoin_business_growth_timeseries(query):
|
||||
# pipeline = [
|
||||
# {
|
||||
# "$match": {
|
||||
# "days_ago": {"$lte": int(query["days_ago"])},
|
||||
# "country_name": query["country_name"],
|
||||
# }
|
||||
# },
|
||||
# {
|
||||
# "$project": {
|
||||
# "country_name": "$country_name",
|
||||
# "date": "$date",
|
||||
# "cumulative_value": "$cumulative_value",
|
||||
# }
|
||||
# },
|
||||
# {"$sort": {"country_name": 1, "days_ago": 1}},
|
||||
# ]
|
||||
# return pipeline
|
||||
#
|
||||
#
|
||||
# def bitcoin_business_growth_percent_diff_days_ago(query):
|
||||
# pipeline = [
# {"$match": {"days_ago": {"$lte": int(query["days_ago"])}}},
# {"$sort": {"country_name": 1, "days_ago": 1}},
# {
# "$group": {
# "_id": "$country_name",
# "firstvalue": {"$first": "$cumulative_value"},
# "lastvalue": {"$last": "$cumulative_value"},
# "firstdate": {"$min": "$date"},
# "lastdate": {"$max": "$date"},
# }
# },
# {
# "$project": {
# "country_name": "$_id",
# "first_value": "$firstvalue",
# "last_value": "$lastvalue",
# "difference": {
# "$subtract": [
# {"$todouble": "$firstvalue"},
# {"$todouble": "$lastvalue"},
# ]
# },
# "first_date": "$firstdate",
# "last_date": "$lastdate",
# "percent_difference": {
# "$cond": {
# "if": {"$eq": [{"$todouble": "$lastvalue"}, 0]},
# "then": {
# "$cond": {
# "if": {"$gt": [{"$todouble": "$firstvalue"}, 0]},
# "then": "new",
# "else": "none",
# }
# },
# "else": {
# "$round": [
# {
# "$multiply": [
# {
# "$divide": [
# {
# "$subtract": [
# {"$todouble": "$firstvalue"},
# {"$todouble": "$lastvalue"},
# ]
# },
# {"$todouble": "$lastvalue"},
# ]
# },
# 100,
# ]
# }
# ]
# },
# }
# },
# }
# },
# ]
# return pipeline
|
||||
#
|
||||
#
|
||||
# def bitcoin_business_growth_latest(query):
|
||||
# pipeline = [
|
||||
# {
|
||||
# "$match": query["filter"],
|
||||
# },
|
||||
# {"$sort": {"date": 1}},
|
||||
# ]
|
||||
# return pipeline
|
|
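For orientation, here is a minimal usage sketch of the query builders above. The argument values are illustrative only (real calls receive the parsed `query` dict from the API route), and note that `country_name` is interpolated straight into the SQL string, so callers are trusted not to pass arbitrary input:

from api.pipelines import bitcoin_business_growth_timeseries, parse_int

# Illustrative args; in the app these come from route.py's parse_args_to_dict().
sql = bitcoin_business_growth_timeseries({"days_ago": "30", "country_name": "Germany"})
print(sql)  # select * from models_final.final__bitcoin_business_growth_by_country where days_ago <= 30 ...

parse_int("thirty")  # prints "thirty cannot be cast to int" and re-raises ValueError
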
@ -0,0 +1,34 @@
|
|||
from psycopg2.extras import RealDictCursor
|
||||
import psycopg2, os
|
||||
|
||||
class PostgresHandler:
|
||||
def __init__(self):
|
||||
self.connection = self.connect_to_pg()
|
||||
self.cur = self.connection.cursor(cursor_factory=RealDictCursor)
|
||||
|
||||
def connect_to_pg(self):
|
||||
try:
|
||||
connection = psycopg2.connect(
|
||||
dbname=os.getenv('PGDATABASE'),
|
||||
host=os.getenv('PGHOST'),
|
||||
user=os.getenv('PGUSER'),
|
||||
password=os.getenv('PGPASSWORD'),
|
||||
port=os.getenv('PGPORT'),
|
||||
)
|
||||
except Exception as e:
|
||||
message=f"Connection to postgres database failed: {e}"
|
||||
raise Exception(message)
|
||||
print(f"Successfully connected to DB")
|
||||
return connection
|
||||
|
||||
def execute_query(self, query):
|
||||
try:
|
||||
self.cur.execute(query)
|
||||
results = self.cur.fetchall()
|
||||
self.connection.commit()
|
||||
self.connection.close()
|
||||
return results
|
||||
except Exception:
|
||||
print("Error executing query")
|
||||
raise
|
||||
|
|
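A minimal sketch of how PostgresHandler is used, assuming the PG* environment variables (e.g. from the sourced .env file) point at a reachable database. Because execute_query closes the connection after fetching, each request builds a fresh handler:

import api.pipelines as pipelines
from api.postgres_handler import PostgresHandler

# Assumes PGDATABASE, PGHOST, PGUSER, PGPASSWORD and PGPORT are exported.
handler = PostgresHandler()
rows = handler.execute_query(pipelines.mangrove_by_country_latest())
print(rows[0])  # a RealDictRow mapping column names to values
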
@ -0,0 +1,96 @@
|
|||
from fastapi import APIRouter
|
||||
from api.postgres_handler import PostgresHandler
|
||||
import api.pipelines as pipelines
|
||||
import api.schemas as schemas
|
||||
from api.schemas import DataSerializer
|
||||
import json
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
def parse_args_to_dict(query):
|
||||
try:
|
||||
return json.loads(query)
|
||||
except json.JSONDecodeError as e:
|
||||
return {"error": f"Invalid JSON: {e}"}
|
||||
|
||||
@router.get("/mangrove_by_country_latest")
|
||||
async def mangrove_by_country_latest():
|
||||
pipeline = pipelines.mangrove_by_country_latest()
|
||||
handler = PostgresHandler()
|
||||
|
||||
schema = schemas.mangrove_by_country_latest_schema
|
||||
serializer = DataSerializer(schema)
|
||||
rawData = handler.execute_query(pipeline)
|
||||
serializedData = serializer.serialize_many(rawData)
|
||||
return serializedData
|
||||
|
||||
@router.get("/bitcoin_business_growth_timeseries")
|
||||
async def bitcoin_business_growth_timeseries(query: str):
|
||||
args = parse_args_to_dict(query)
|
||||
|
||||
pipeline = pipelines.bitcoin_business_growth_timeseries(args)
|
||||
handler = PostgresHandler()
|
||||
|
||||
schema = schemas.bitcoin_business_growth_timeseries_schema
|
||||
serializer = DataSerializer(schema)
|
||||
|
||||
rawData = handler.execute_query(pipeline)
|
||||
serializedData = serializer.serialize_many(rawData)
|
||||
|
||||
return serializedData
|
||||
|
||||
@router.get("/bitcoin_business_growth_percent_diff")
|
||||
async def bitcoin_business_growth_percent_diff(query: str):
|
||||
args = parse_args_to_dict(query)
|
||||
|
||||
pipeline = pipelines.bitcoin_business_growth_percent_diff_days_ago(args)
|
||||
handler = PostgresHandler()
|
||||
|
||||
schema = schemas.bitcoin_business_growth_percent_diff_schema
|
||||
serializer = DataSerializer(schema)
|
||||
|
||||
rawData = handler.execute_query(pipeline)
|
||||
serializedData = serializer.serialize_many(rawData)
|
||||
return serializedData
|
||||
|
||||
# @router.get("/bitcoin_business_growth_percent_diff")
|
||||
# async def bitcoin_business_growth_percent_diff(query: str):
|
||||
# query = ast.literal_eval(query)
|
||||
#
|
||||
# query = queries.bitcoin_business_growth_percent_diff_days_ago(query)
|
||||
# handler = PostgresHandler(connection)
|
||||
#
|
||||
# schema = schemas.bitcoin_business_growth_percent_diff_schema
|
||||
# pipeline = pipelines.bitcoin_business_growth_percent_diff_days_ago(query)
|
||||
# serializer = DataSerializer(schema)
|
||||
# handler = MongoDBHandler(collection_name)
|
||||
# rawData = handler.aggregate(pipeline)
|
||||
# serializedData = serializer.serialize_many(rawData)
|
||||
# return serializedData
|
||||
# @router.get("/mangrove_by_country_agg")
|
||||
# async def mangrove_by_country_agg(query: str):
|
||||
# query = ast.literal_eval(query)
|
||||
# db = client.baseddata
|
||||
# collection_name = db["final__protected_mangroves_summary_stats_by_country_agg"]
|
||||
# schema = schemas.mangrove_by_country_agg_schema
|
||||
# pipeline = pipelines.mangrove_by_country_agg(query)
|
||||
# serializer = DataSerializer(schema)
|
||||
# handler = MongoDBHandler(collection_name)
|
||||
# rawData = handler.aggregate(pipeline)
|
||||
# serializedData = serializer.serialize_many(rawData)
|
||||
# return serializedData
|
||||
#
|
||||
|
||||
# @router.get("/bitcoin_business_growth_timeseries")
|
||||
# async def bitcoin_business_growth_timeseries(query: str):
|
||||
# query = ast.literal_eval(query)
|
||||
# db = client.baseddata
|
||||
# collection_name = db["final__bitcoin_business_growth_by_country"]
|
||||
# schema = schemas.bitcoin_business_growth_timeseries_schema
|
||||
# pipeline = pipelines.bitcoin_business_growth_timeseries(query)
|
||||
# serializer = DataSerializer(schema)
|
||||
# handler = MongoDBHandler(collection_name)
|
||||
# rawData = handler.aggregate(pipeline)
|
||||
# serializedData = serializer.serialize_many(rawData)
|
||||
# return serializedData
|
||||
|
|
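The routes above expect a single `query` parameter holding a JSON object, which parse_args_to_dict converts into the args passed to the pipeline builders. A hedged client-side sketch against the local dev server (host and port match the apiURL = 'http://localhost:8000' set in hugo.toml; the argument values are made up):

import json
import requests

params = {"query": json.dumps({"days_ago": "30", "country_name": "Germany"})}
resp = requests.get("http://localhost:8000/bitcoin_business_growth_timeseries", params=params)
resp.raise_for_status()
print(resp.json()[:2])  # serialized rows with country_name, date and cumulative_value
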
@ -0,0 +1,42 @@
|
|||
def mangrove_by_country_latest_schema(data):
|
||||
return {
|
||||
"country_with_parent": str(data["country_with_parent"]),
|
||||
"original_pixels": int(data["original_pixels"]),
|
||||
"total_n_pixels": int(data["total_n_pixels"]),
|
||||
"cumulative_pixels_diff": int(data["cumulative_pixels_diff"]),
|
||||
"cumulative_pct_diff": float(data["cumulative_pct_diff"]),
|
||||
}
|
||||
|
||||
def mangrove_by_country_agg_schema(data):
|
||||
return {
|
||||
"country_with_parent": str(data["country_with_parent"]),
|
||||
"year": int(data["year"]),
|
||||
"total_pixels": int(data["total_pixels"])
|
||||
}
|
||||
|
||||
def bitcoin_business_growth_percent_diff_schema(data):
|
||||
return {
|
||||
"country_name": str(data["country_name"]),
|
||||
"date_range": str(f'{data["first_date"]} to {data["last_date"]}'),
|
||||
"first_value": int(data["first_value"]),
|
||||
"last_value": int(data["last_value"]),
|
||||
"difference": int(data["difference"]),
|
||||
"percent_difference": str(data["percent_difference"])
|
||||
}
|
||||
|
||||
def bitcoin_business_growth_timeseries_schema(data):
|
||||
return {
|
||||
"country_name": str(data["country_name"]),
|
||||
"date": data["date"],
|
||||
"cumulative_value": int(data["cumulative_value"])
|
||||
}
|
||||
|
||||
class DataSerializer:
|
||||
def __init__(self, schema_func):
|
||||
self.schema_func = schema_func
|
||||
|
||||
def serialize_one(self, data) -> dict:
|
||||
return self.schema_func(dict( data ))
|
||||
|
||||
def serialize_many(self, data_list) -> list:
|
||||
return [self.serialize_one(data) for data in data_list]
|
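DataSerializer simply maps a schema function over query results. A small sketch with a made-up row shaped like the RealDictRow mappings returned by PostgresHandler:

from api.schemas import DataSerializer, bitcoin_business_growth_timeseries_schema

serializer = DataSerializer(bitcoin_business_growth_timeseries_schema)
row = {"country_name": "Germany", "date": "2024-01-01", "cumulative_value": "42"}  # illustrative values
print(serializer.serialize_many([row]))
# [{'country_name': 'Germany', 'date': '2024-01-01', 'cumulative_value': 42}]
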
backend/app.py
@ -1,172 +0,0 @@
|
|||
from flask import Flask, g, jsonify, request, json, Response, send_from_directory, abort
|
||||
from flask_cors import CORS
|
||||
import orjson, os
|
||||
|
||||
import datetime
|
||||
import time
|
||||
|
||||
app = Flask(__name__)
|
||||
CORS(app)
|
||||
|
||||
FILES_DIRECTORY = "../data/"
|
||||
|
||||
|
||||
@app.before_request
|
||||
def start_timer():
|
||||
g.start = time.time()
|
||||
|
||||
|
||||
@app.after_request
|
||||
def log(response):
|
||||
now = time.time()
|
||||
duration = round(now - g.start, 4)
|
||||
dt = datetime.datetime.fromtimestamp(now).strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
log_entry = {
|
||||
"timestamp": dt,
|
||||
"duration": duration,
|
||||
"method": request.method,
|
||||
"url": request.url,
|
||||
"status": response.status_code,
|
||||
"remote_addr": request.access_route[-1],
|
||||
"user_agent": request.user_agent.string,
|
||||
}
|
||||
|
||||
log_line = ",".join(f"{key}={value}" for key, value in log_entry.items())
|
||||
|
||||
with open("api_logs.txt", "a") as f:
|
||||
f.write(log_line + "\n")
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@app.route("/bitcoin_business_growth_by_country", methods=["GET"])
|
||||
def business_growth():
|
||||
|
||||
today = datetime.datetime.today()
|
||||
|
||||
# Parse args from request
|
||||
latest_date = request.args.get("latest_date")
|
||||
country_names = request.args.get("countries") # change this line
|
||||
cumulative_period_type = request.args.get("cumulative_period_type")
|
||||
|
||||
# Open json locally
|
||||
with open("../data/final__bitcoin_business_growth_by_country.json", "rb") as f:
|
||||
data = orjson.loads(f.read())
|
||||
|
||||
# Filter based on args
|
||||
if latest_date:
|
||||
latest_date_bool = latest_date == "true"
|
||||
filtered_data = [
|
||||
item for item in data if item["latest_date"] == latest_date_bool
|
||||
]
|
||||
else:
|
||||
filtered_data = data
|
||||
|
||||
if country_names:
|
||||
countries = [name.strip() for name in country_names.split(",")]
|
||||
filtered_data = [
|
||||
item for item in filtered_data if item["country_name"] in countries
|
||||
]
|
||||
|
||||
if cumulative_period_type == "1 day":
|
||||
delta = today - datetime.timedelta(days=2)
|
||||
filtered_data = [
|
||||
item
|
||||
for item in filtered_data
|
||||
if item["cumulative_period_type"] == cumulative_period_type
|
||||
and delta <= datetime.datetime.strptime(item["date"], "%Y-%m-%d")
|
||||
]
|
||||
elif cumulative_period_type == "7 day":
|
||||
delta = today - datetime.timedelta(days=8)
|
||||
filtered_data = [
|
||||
item
|
||||
for item in filtered_data
|
||||
if item["cumulative_period_type"] == cumulative_period_type
|
||||
and delta <= datetime.datetime.strptime(item["date"], "%Y-%m-%d")
|
||||
]
|
||||
elif cumulative_period_type == "28 day":
|
||||
delta = today - datetime.timedelta(days=29)
|
||||
filtered_data = [
|
||||
item
|
||||
for item in filtered_data
|
||||
if item["cumulative_period_type"] == cumulative_period_type
|
||||
and delta <= datetime.datetime.strptime(item["date"], "%Y-%m-%d")
|
||||
]
|
||||
elif cumulative_period_type == "365 day":
|
||||
delta = today - datetime.timedelta(days=366)
|
||||
filtered_data = [
|
||||
item
|
||||
for item in filtered_data
|
||||
if item["cumulative_period_type"] == cumulative_period_type
|
||||
and delta <= datetime.datetime.strptime(item["date"], "%Y-%m-%d")
|
||||
]
|
||||
|
||||
# Sort by date
|
||||
sorted_data = sorted(filtered_data, key=lambda x: x["date"], reverse=False)
|
||||
|
||||
# Return json
|
||||
return Response(json.dumps(sorted_data), mimetype="application/json")
|
||||
|
||||
|
||||
@app.route("/get_json/<filename>", methods=["GET"])
|
||||
def get_json(filename):
|
||||
|
||||
period = request.args.get("period")
|
||||
today = datetime.datetime.today()
|
||||
|
||||
file_path = os.path.join(FILES_DIRECTORY, filename)
|
||||
if not os.path.isfile(file_path):
|
||||
abort(404)
|
||||
|
||||
with open(file_path, "r") as f:
|
||||
data = orjson.loads(f.read())
|
||||
|
||||
if period == "last 7 days":
|
||||
delta = today - datetime.timedelta(days=7)
|
||||
filtered_data = [
|
||||
item
|
||||
for item in data
|
||||
if delta <= datetime.datetime.strptime(item["date"], "%Y-%m-%d") <= today
|
||||
]
|
||||
sorted_data = sorted(filtered_data, key=lambda x: x["date"])
|
||||
elif period == "last 28 days":
|
||||
delta = today - datetime.timedelta(days=28)
|
||||
filtered_data = [
|
||||
item
|
||||
for item in data
|
||||
if delta <= datetime.datetime.strptime(item["date"], "%Y-%m-%d") <= today
|
||||
]
|
||||
sorted_data = sorted(filtered_data, key=lambda x: x["date"])
|
||||
elif period == "last 365 days":
|
||||
delta = today - datetime.timedelta(days=365)
|
||||
filtered_data = [
|
||||
item
|
||||
for item in data
|
||||
if delta <= datetime.datetime.strptime(item["date"], "%Y-%m-%d") <= today
|
||||
]
|
||||
sorted_data = sorted(filtered_data, key=lambda x: x["date"])
|
||||
elif period == "last 2 years":
|
||||
delta = today - datetime.timedelta(days=730)
|
||||
filtered_data = [
|
||||
item
|
||||
for item in data
|
||||
if delta <= datetime.datetime.strptime(item["date"], "%Y-%m-%d") <= today
|
||||
]
|
||||
sorted_data = sorted(filtered_data, key=lambda x: x["date"])
|
||||
else:
|
||||
sorted_data = sorted(data, key=lambda x: x["date"])
|
||||
|
||||
return jsonify(sorted_data)
|
||||
|
||||
|
||||
@app.route("/download/<filename>", methods=["GET"])
|
||||
def download_file(filename):
|
||||
try:
|
||||
return send_from_directory(FILES_DIRECTORY, filename, as_attachment=True)
|
||||
except FileNotFoundError:
|
||||
abort(404)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run()
|
|
@ -0,0 +1,6 @@
from fastapi import FastAPI
from api.route import router

app = FastAPI()

app.include_router(router)

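A sketch of serving this app during development, equivalent to the `uvicorn main:app --reload` command in shell.nix (the port matches the localhost apiURL in hugo.toml; Gunicorn fronts the app in production per the README):

# Hypothetical dev entry point, run from the backend/ directory.
import uvicorn

if __name__ == "__main__":
    uvicorn.run("main:app", host="127.0.0.1", port=8000, reload=True)
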
@ -1,42 +0,0 @@
|
|||
## Sam Chance | Analytics Engineer | Cornwall, UK
|
||||
***contact@sjplab.com***
|
||||
|
||||
4 years experience working in the tech industry as a Data Analyst and Analytics Engineer. Experienced at providing high quality consulting services to company stakeholders. Demonstrable proficiency in various programming languages. Strong educational background in a STEM field with Master's and Bachelor's degree. Excellent communication skills with competence at articulating complex ideas to non-technical people. Broad set of technical skills and talented at crafting creative solutions for novel problems.
|
||||
|
||||
## Skills
|
||||
- **Database Management**: BigQuery | Postgres | MongoDB (learning)
|
||||
- **Analytics**: Data Engineering | Data Analysis (R, Pandas, Numpy, SPSS) | Data Modelling (DBT, Python, SQL) | Data Visualization | Dashboarding (Looker Studio, Apache ECharts)
|
||||
- **Programming Languages**: proficient: SQL, Python, Bash | capable: R, Lua | learning: JavaScript, Nix
|
||||
- **Code Collaboration & Version Control**: Git, Github, Self Hosted Gitea
|
||||
- **Workflow Orchestration**: Apache Airflow | Prefect (learning)
|
||||
- **CLI wizard**: Scripting (Bash, Python) | GNU Coreutils | Neovim
|
||||
- **Geographic Information Systems**: QGIS | PostGIS | GDAL
|
||||
- **Sysadmin**: Linux | Networking | SSH | Reverse Proxy (Nginx) | API Management | Containerization (Docker, LXC) | Nixos
|
||||
- **Computers**: Cloud Computing (GCP) | Infrastructure Engineering (Homelab, Hypervisors, KVM) | x86 Architecture
|
||||
- **AI Tooling**: Self Hosted LLMs | Ollama | Langchain | Open Interpreter
|
||||
- **Web Development**: Hugo | JavaScript/HTML/CSS | Apache ECharts
|
||||
- **Personal**: Full UK Driving License | Flexible Working | Autonomous Worker
|
||||
|
||||
## Experience
|
||||
### Growth Analyst / Analytics Engineer | What3Words | May 2020 to Apr 2024
|
||||
- Designed and implemented successful bespoke alerting system from scratch to detect anomalous behaviour in partner API usage (using python)
|
||||
- General consultant to company stakeholders for resolving technical data related problems
|
||||
- Built company KPI dashboard in Looker Studio along with underlying data pipelines and data models
|
||||
- Built QA tool for Product Team to assess and monitor data quality
|
||||
- Assisted with several large data migration projects in the Data Team
|
||||
- Researched and implemented data mining techniques for Product and BD Teams
|
||||
- Started as a Growth Analyst, then promoted to Analytics Engineer
|
||||
|
||||
## Education
|
||||
### MSc Marine Biology | Bangor University | 1 year
|
||||
- Delivering technical presentations
|
||||
- Programmatic statistical analysis of data
|
||||
- **Thesis**: "Quantifying the Effectiveness of Indonesia's Protected Areas at Preventing Mangrove Deforestation"
|
||||
* A GIS project that involved statistical modelling of satellite data using ArcGIS and R
|
||||
|
||||
### BSc Marine Biology & Zoology | Bangor University | 3 years
|
||||
- **Thesis**: "Biodiversity of Seamounts"
|
||||
|
||||
## Personal Websites
|
||||
- https://baseddata.io: A site where I publish analytical content based on open data. Still a work in progress.
|
||||
- https://semitamaps.com: A free high-quality map printing service based on OpenStreetMaps.
|
|
@ -15,8 +15,10 @@ You can select the growth period of interest from the drop-down, which updates t
|
|||
The chart always reflects the countries selected in the table.
|
||||
|
||||
<br/>
|
||||
{{< chart src="/js/bitcoin-business-growth-chart.js" >}}
|
||||
{{< table src="/js/bitcoin-business-growth-table.js" >}}
|
||||
{{< dropdown_filter id="days_ago_dropdown_filter" id_filter="days_ago" options="1 day:1,7 day:7,28 day:28,1 year:365,5 year:1826,10 year:3652,all time:10000" default_selection="7 day" targets="bitcoin-business-growth-chart bitcoin-business-growth-table" >}}
|
||||
{{< table id="bitcoin-business-growth-table" endpoint="bitcoin_business_growth_percent_diff" headers="{'country_name': 'Country', 'date_range': 'Date Range', 'first_value': 'Previous #', 'last_value': 'Current #', 'difference': 'Diff', 'percent_difference': '% Diff'}" maxHeight="400px" sortable="true" valueId="country_name" selectableRows="multi" targets="bitcoin-business-growth-chart" defaultFirstSelected="true" >}}
|
||||
|
||||
{{< chart id="bitcoin-business-growth-chart" endpoint="bitcoin_business_growth_timeseries" chartType="line" xAxisField="date" yAxisField="cumulative_value" scaleChart=true >}}
|
||||
|
||||
#### Attribution and License
|
||||
Data obtained from © [OpenStreetMap](https://www.openstreetmap.org/copyright)
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
---
|
||||
title: "Global Protected Mangroves"
|
||||
date: 2024-09-04T16:00:57+01:00
|
||||
author:
|
||||
name: "Sam Chance"
|
||||
header_image: "/pics/charts/price.webp"
|
||||
summary: "Daily bitcoin price. Data is obtained from CoinGecko using their public API."
|
||||
tags: ["Bitcoin", "Stats"]
|
||||
script: "/js/mangrove-map.js"
|
||||
---
|
||||
|
||||
{{< table id="mangrove_countries" endpoint="mangrove_by_country_latest" headers="{'country_with_parent': 'Country', 'original_pixels': '1996 Cover', 'total_n_pixels': '2020 Cover', 'cumulative_pixels_diff': 'Diff', 'cumulative_pct_diff': '% Diff'}" maxHeight="400px" sortable="true" valueId="country_with_parent" selectableRows="single" defaultFirstSelected="true" >}}
|
||||
{{< chart id="mangrove_countries" endpoint="mangrove_by_country_agg" chartType="bar" xAxisField="year" yAxisField="total_pixels" scaleChart=true >}}
|
||||
{{< map id="map" style="https://tiles.semitamaps.com/styles/maptiler-basic/style.json">}}
|
||||
|
||||
{{< chart id="mangrove-country-timeseries-chart" endpoint="mangrove_country_timeseries" chartType="line" xAxisField="date" yAxisField="n_pixels" scaleChart=true >}}
|
|
@ -3,7 +3,7 @@ languageCode = 'en-gb'
|
|||
title = 'Based Data'
|
||||
|
||||
[params]
|
||||
apiURL = 'https://api.baseddata.io'
|
||||
apiURL = 'http://localhost:8000'
|
||||
|
||||
[markup.highlight]
|
||||
pygmentsUseClasses = false
|
||||
|
|
|
@ -5,3 +5,6 @@
|
|||
{{ partial "footer.html" . }}
|
||||
</div>
|
||||
</body>
|
||||
{{ if .Params.script }}
|
||||
<script src="{{ .Params.script | absURL }}"></script>
|
||||
{{ end }}
|
||||
|
|
|
@ -1,8 +1,96 @@
|
|||
<script src="https://cdn.jsdelivr.net/npm/echarts@5.5.1/dist/echarts.min.js"></script>
|
||||
<section class="chart-container">
|
||||
<div id="chart-modifiers"></div>
|
||||
<div id="chart">
|
||||
<script src="{{ .src }}"></script>
|
||||
</div>
|
||||
</section>
|
||||
<script>
|
||||
chartData = [];
|
||||
function createChart(
|
||||
id,
|
||||
endpoint,
|
||||
chartType,
|
||||
xAxisField,
|
||||
yAxisField,
|
||||
sortField = null,
|
||||
scaleChart = false,
|
||||
) {
|
||||
async function fetchDataForChart(query, valueId) {
|
||||
try {
|
||||
const apiEndpoint = `${apiURL}/${endpoint}?${query}`;
|
||||
const response = await fetch(apiEndpoint);
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
const fetchedData = await response.json();
|
||||
const newData = fetchedData.reduce((acc, item) => {
|
||||
const objectId = item[valueId];
|
||||
if (!acc[objectId]) {
|
||||
acc[objectId] = [];
|
||||
}
|
||||
acc[objectId].push([item[xAxisField], item[yAxisField]]);
|
||||
return acc;
|
||||
}, {});
|
||||
chartData = { ...chartData, ...newData };
|
||||
updateChart();
|
||||
} catch (error) {
|
||||
console.error("Fetching data failed:", error);
|
||||
}
|
||||
}
|
||||
|
||||
function updateChart() {
|
||||
console.log(chartData);
|
||||
let chartDataMap = new Map();
|
||||
for (let objectId in chartData) {
|
||||
chartDataMap.set(objectId, chartData[objectId]);
|
||||
}
|
||||
var chartDom = document.getElementById(`${id}`);
|
||||
var myChart = echarts.init(chartDom);
|
||||
|
||||
var option = {
|
||||
tooltip: {
|
||||
...tooltip,
|
||||
valueFormatter(value, index) {
|
||||
return nFormatter(value, 0);
|
||||
},
|
||||
},
|
||||
xAxis: {
|
||||
type: "time",
|
||||
},
|
||||
yAxis: {
|
||||
scale: scaleChart,
|
||||
type: "value",
|
||||
},
|
||||
series: Array.from(chartDataMap.entries()).map(([name, data]) => ({
|
||||
name,
|
||||
type: chartType,
|
||||
data,
|
||||
showSymbol: false,
|
||||
})),
|
||||
};
|
||||
|
||||
myChart.setOption(option, true);
|
||||
}
|
||||
|
||||
// listen for filter events for this target
|
||||
document.addEventListener("filterChange", function (event) {
|
||||
tableId = document.getElementById(id).id;
|
||||
console.log(event.detail);
|
||||
eventDetail = event.detail;
|
||||
if (eventDetail.filterActions.includes("refresh")) {
|
||||
chartData = [];
|
||||
updateChart();
|
||||
} else {
|
||||
if (eventDetail.filterTargets.includes(tableId)) {
|
||||
if (eventDetail.filterActions.includes("selected")) {
|
||||
valueId = eventDetail.filterId;
|
||||
let selectedRow = {
|
||||
[valueId]: eventDetail.filterValue,
|
||||
};
|
||||
query = queryConstructor(selectedRow);
|
||||
fetchDataForChart(query, valueId);
|
||||
} else {
|
||||
delete chartData[eventDetail.filterValue];
|
||||
updateChart();
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
</script>
|
||||
<script src="/js/chart-params.js"></script>
|
||||
|
|
|
@ -8,6 +8,8 @@
|
|||
<link rel="stylesheet" href="/css/toc.css" type="text/css" media="all" />
|
||||
<link rel="stylesheet" href="/css/articles.css" type="text/css" media="all" />
|
||||
<link rel="stylesheet" href="/css/charts.css" type="text/css" media="all" />
|
||||
<script src="/js/lib/sorttable.js"></script>
|
||||
<script src="/js/lib/helper-functions.js"></script>
|
||||
<link
|
||||
rel="stylesheet"
|
||||
href="/css/codeblock.css"
|
||||
|
@ -20,4 +22,7 @@
|
|||
href="https://fonts.googleapis.com/css2?family=Fira+Mono:wght@400;500;700&family=Montserrat:ital,wght@0,100..900;1,100..900&display=swap"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
<script>
|
||||
const apiURL = "{{ .Site.Params.apiURL }}";
|
||||
</script>
|
||||
</html>
|
||||
|
|
|
@ -1,2 +1,142 @@
|
|||
<script src="{{ .src }}"></script>
|
||||
<table id="jsonTableContainer"></table>
|
||||
<script>
|
||||
function createTable(
|
||||
endpoint,
|
||||
id,
|
||||
headers,
|
||||
maxHeight,
|
||||
sortable,
|
||||
valueId,
|
||||
selectableRows,
|
||||
filterTargets,
|
||||
defaultFirstSelected,
|
||||
) {
|
||||
async function fetchDataForTable(query) {
|
||||
try {
|
||||
const apiEndpoint = `${apiURL}/${endpoint}?${query}`;
|
||||
const response = await fetch(apiEndpoint);
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
const fetchedData = await response.json();
|
||||
data = fetchedData;
|
||||
generateTable(data);
|
||||
} catch (error) {
|
||||
console.error("Fetching data failed:", error);
|
||||
}
|
||||
}
|
||||
|
||||
function generateTable(data) {
|
||||
const jsonTableContainer = document.getElementById(`${id}--container`);
|
||||
jsonTableContainer.className = "jsonTableContainer";
|
||||
jsonTableContainer.innerHTML = "";
|
||||
jsonTableContainer.style.maxHeight = maxHeight;
|
||||
|
||||
tableHeaderNames = Object.values(headers);
|
||||
tableHeaderKeys = Object.keys(headers);
|
||||
|
||||
const table = document.createElement("table");
|
||||
table.id = `${id}`;
|
||||
const thead = document.createElement("thead");
|
||||
const tbody = document.createElement("tbody");
|
||||
const headerRow = document.createElement("tr");
|
||||
tableHeaderNames.forEach((header) => {
|
||||
const th = document.createElement("th");
|
||||
th.textContent = header;
|
||||
headerRow.appendChild(th);
|
||||
});
|
||||
thead.appendChild(headerRow);
|
||||
table.appendChild(thead);
|
||||
|
||||
for (const key in data) {
|
||||
const row = document.createElement("tr");
|
||||
row.value = data[key][valueId];
|
||||
tableHeaderKeys.forEach((columnName) => {
|
||||
const td = document.createElement("td");
|
||||
const div = document.createElement("div");
|
||||
div.id = "scrollable";
|
||||
div.textContent = data[key][columnName];
|
||||
td.appendChild(div);
|
||||
row.appendChild(td);
|
||||
tbody.appendChild(row);
|
||||
});
|
||||
}
|
||||
table.appendChild(thead);
|
||||
table.appendChild(tbody);
|
||||
jsonTableContainer.appendChild(table);
|
||||
|
||||
// sortable
|
||||
if (sortable == "true") {
|
||||
table.className = "sortable";
|
||||
sorttable.makeSortable(document.getElementById(`${id}`));
|
||||
}
|
||||
|
||||
if (selectableRows === "multi" || selectableRows === "single") {
|
||||
const rows = table.getElementsByTagName("tr");
|
||||
for (let i = 1; i < rows.length; i++) {
|
||||
rows[i].addEventListener("click", function () {
|
||||
if (selectableRows === "multi") {
|
||||
this.classList.toggle("selected");
|
||||
if (this.classList.contains("selected")) {
|
||||
const event = new CustomEvent("filterChange", {
|
||||
detail: {
|
||||
filterId: valueId,
|
||||
filterValue: this.value,
|
||||
filterActions: ["selected"],
|
||||
filterTargets: filterTargets,
|
||||
},
|
||||
});
|
||||
document.dispatchEvent(event);
|
||||
} else {
|
||||
const event = new CustomEvent("filterChange", {
|
||||
detail: {
|
||||
filterId: valueId,
|
||||
filterValue: this.value,
|
||||
filterActions: ["deselected"],
|
||||
filterTargets: filterTargets,
|
||||
},
|
||||
});
|
||||
document.dispatchEvent(event);
|
||||
}
|
||||
} else if (selectableRows === "single") {
|
||||
if (this.classList.contains("selected")) {
|
||||
this.classList.remove("selected");
|
||||
} else {
|
||||
for (let j = 1; j < rows.length; j++) {
|
||||
rows[j].classList.remove("selected");
|
||||
}
|
||||
this.classList.add("selected");
|
||||
}
|
||||
}
|
||||
});
|
||||
if (defaultFirstSelected == true) {
|
||||
if (i == 1) {
|
||||
rows[i].classList.add("selected");
|
||||
const event = new CustomEvent("filterChange", {
|
||||
detail: {
|
||||
filterId: valueId,
|
||||
filterValue: rows[i].value,
|
||||
filterActions: ["selected"],
|
||||
filterTargets: filterTargets,
|
||||
},
|
||||
});
|
||||
document.dispatchEvent(event);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// listen for filter events for this target
|
||||
document.addEventListener("filterChange", function (event) {
|
||||
tableId = document.getElementById(id).id;
|
||||
if (event.detail.filterTargets.includes(tableId)) {
|
||||
query = queryConstructor();
|
||||
fetchDataForTable(query);
|
||||
}
|
||||
});
|
||||
|
||||
query = queryConstructor();
|
||||
fetchDataForTable(query);
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -1,5 +1,10 @@
|
|||
{{ partial "chart.html" }}
|
||||
<section class = 'chart-container'>
|
||||
<div class = "chart" id='{{ .Get "id" }}'>
|
||||
<script>
|
||||
const apiURL = "{{ .Site.Params.apiURL }}";
|
||||
document.addEventListener("DOMContentLoaded", function () {
|
||||
createChart(id={{ .Get "id" }}, endpoint={{ .Get "endpoint" }}, chartType={{ .Get "chartType" }}, xAxisField={{ .Get "xAxisField" }}, yAxisField={{ .Get "yAxisField" }}, sortField={{ .Get "sortField" }}, scaleChart={{ .Get "scaleChart" }})
|
||||
});
|
||||
</script>
|
||||
{{ $id := .Get "src" | md5 }} {{ partial "chart.html" (dict "src" (.Get "src")
|
||||
"id" $id) }}
|
||||
</div>
|
||||
</section>
|
||||
|
|
|
@ -5,7 +5,9 @@
|
|||
<script>
|
||||
async function downloadFile(url, name) {
|
||||
try {
|
||||
const response = await fetch(`${"{{ .Site.Params.apiURL }}"}/download${url}`);
|
||||
downloadUrl = `${"{{ .Site.Params.apiURL }}"}/download${url}`
|
||||
console.log("Downloading from url:",downloadUrl)
|
||||
const response = await fetch(downloadUrl);
|
||||
if (!response.ok) {
|
||||
throw new Error('Network response was not ok');
|
||||
}
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
{{ $id := .Get "id" }}
|
||||
{{ $default_selection := .Get "default_selection" }}
|
||||
{{ $options := .Get "options" }}
|
||||
|
||||
{{ $options_split := split $options "," }}
|
||||
|
||||
<div class="dropdown-filter-container">
|
||||
<select class="filter dropdown-filter" id="{{ $id }}" idFilter='{{ .Get "id_filter" }}' onchange="dispatchDropdownEvent(this)">
|
||||
{{ range $options_split }}
|
||||
{{ $parts := split . ":" }}
|
||||
{{ $key := index $parts 0 }}
|
||||
{{ $value := index $parts 1 }}
|
||||
<option value="{{ $value }}" {{ if eq $key $default_selection }}selected{{ end }}>{{ $key }}</option>
|
||||
{{ end }}
|
||||
</select>
|
||||
<script>
|
||||
function dispatchDropdownEvent(selectElement) {
|
||||
const event = new CustomEvent('filterChange', {
|
||||
detail: {
|
||||
filterId: '{{ .Get "id_filter" }}',
|
||||
filterValue: selectElement.value,
|
||||
filterActions: ["refresh"],
|
||||
filterTargets: '{{ .Get "targets" }}'.split(" ")
|
||||
}
|
||||
});
|
||||
document.dispatchEvent(event);
|
||||
}
|
||||
</script>
|
||||
</div>
|
|
@ -0,0 +1,15 @@
|
|||
<script src="https://labs.geomatico.es/maplibre-cog-protocol/dist/index.js"></script>
|
||||
<script src="https://unpkg.com/maplibre-gl/dist/maplibre-gl.js"></script>
|
||||
<link
|
||||
rel="stylesheet"
|
||||
href="https://unpkg.com/maplibre-gl/dist/maplibre-gl.css"
|
||||
/>
|
||||
<section class="map-container">
|
||||
<div id="{{ .Get `id` }}" style="height: 400px"></div>
|
||||
<script>
|
||||
let map = new maplibregl.Map({
|
||||
container: "{{ .Get `id` }}",
|
||||
style: "{{ .Get `style` }}",
|
||||
});
|
||||
</script>
|
||||
</section>
|
|
@ -1,2 +1,8 @@
|
|||
{{ $id := .Get "src" | md5 }}
|
||||
{{ partial "table.html" (dict "src" (.Get "src") "id" $id) }}
|
||||
{{ partial "table.html" }}
|
||||
<div id = '{{ .Get "id" }}--container'>
|
||||
<script>
|
||||
document.addEventListener("DOMContentLoaded", function () {
|
||||
createTable({{ .Get "endpoint" }}, {{ .Get "id" }}, {{ .Get "headers" | safeJS }}, {{ .Get "maxHeight" }}, {{ .Get "sortable" }}, {{ .Get "valueId" }}, {{ .Get "selectableRows" }}, '{{ .Get "targets" }}'.split(" "), {{ .Get "defaultFirstSelected" | safeJS }})
|
||||
});
|
||||
</script>
|
||||
</div>
|
||||
|
|
|
@ -0,0 +1,22 @@
|
|||
[tool.poetry]
|
||||
name = "baseddata-io"
|
||||
version = "0.1.0"
|
||||
description = ""
|
||||
authors = ["Sam <samual.shop@proton.me>"]
|
||||
readme = "README.md"
|
||||
packages = [{include = "baseddata"}]
|
||||
package-mode = false
|
||||
|
||||
[virtualenvs]
|
||||
in-project = true
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.11"
|
||||
fastapi = "^0.115.4"
|
||||
uvicorn = "^0.32.0"
|
||||
psycopg2 = "^2.9.10"
|
||||
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
shell.nix
@ -1,20 +1,19 @@
|
|||
{ pkgs ? import <nixpkgs> { } }:
|
||||
|
||||
{pkgs ? import <nixpkgs> {}}:
|
||||
pkgs.mkShell
|
||||
{
|
||||
nativeBuildInputs = with pkgs; [
|
||||
python312Packages.flask
|
||||
python312Packages.flask-cors
|
||||
python312Packages.requests
|
||||
python312Packages.pandas
|
||||
python312Packages.orjson
|
||||
hugo
|
||||
tmux
|
||||
python311
|
||||
poetry
|
||||
];
|
||||
|
||||
shellHook = ''
|
||||
NIX_LD_LIBRARY_PATH=/run/current-system/sw/share/nix-ld/lib;
|
||||
NIX_LD=/run/current-system/sw/share/nix-ld/lib/ld.so;
|
||||
shellHook = ''
|
||||
${pkgs.cowsay}/bin/cowsay "Welcome to the baseddata.io development environment!" | ${pkgs.lolcat}/bin/lolcat
|
||||
|
||||
export LD_LIBRARY_PATH=$NIX_LD_LIBRARY_PATH
|
||||
source .env
|
||||
source .venv/bin/activate
|
||||
get_session=$(tmux list-session | grep "baseddata")
|
||||
|
||||
if [ -z "$get_session" ];
|
||||
|
@ -22,9 +21,8 @@ pkgs.mkShell
|
|||
tmux new-session -d -s baseddata
|
||||
tmux split-window -h
|
||||
tmux send-keys -t 0 "hugo server" C-m
|
||||
tmux send-keys -t 1 "cd backend && python app.py" C-m
|
||||
tmux send-keys -t 1 "cd backend && uvicorn main:app --reload" C-m
|
||||
echo "Baseddata running in dev tmux shell"
|
||||
fi
|
||||
'';
|
||||
'';
|
||||
}
|
||||
|
||||
|
|
|
@ -1,24 +1,12 @@
|
|||
/* Charts */
|
||||
.chart-flex-container {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.chart-flex-container article {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
#chart {
|
||||
width: 100%;
|
||||
aspect-ratio: 16 / 9;
|
||||
}
|
||||
|
||||
.chart-container {
|
||||
margin-top: 20px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
flex-direction: column;
|
||||
/* height: 600px; */
|
||||
aspect-ratio: 1 / 1;
|
||||
}
|
||||
|
||||
#chart-modifiers {
|
||||
.chart {
|
||||
display: flex;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
}
|
||||
|
|
|
@ -70,17 +70,18 @@
|
|||
|
||||
/* Style for WebKit browsers (Chrome, Safari, etc.) */
|
||||
*::-webkit-scrollbar {
|
||||
height: 5px;
|
||||
border-radius: 6px;
|
||||
height: 2px;
|
||||
border-radius: 1px;
|
||||
background: #1f1f28;
|
||||
}
|
||||
|
||||
*::-webkit-scrollbar-thumb {
|
||||
border-radius: 6px;
|
||||
border-radius: 2px;
|
||||
background: #585653;
|
||||
}
|
||||
|
||||
*::-webkit-scrollbar-thumb:hover {
|
||||
height: 2px;
|
||||
background: #454445; /* Background color on hover */
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,11 @@
|
|||
/* Tables */
|
||||
#jsonTableContainer {
|
||||
max-width: 150%;
|
||||
.jsonTableContainer {
|
||||
display: flex;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
table.sortable th:not(.sorttable_sorted):not(.sorttable_sorted_reverse):not(.sorttable_nosort):after {
|
||||
content: " \25B4\25BE"
|
||||
}
|
||||
|
||||
table {
|
||||
|
@ -23,9 +28,13 @@ td {
|
|||
max-width: 90px;
|
||||
font-size: var(--table-row-font-size);
|
||||
}
|
||||
#scrollable:hover {
|
||||
scrollbar-width: thin;
|
||||
}
|
||||
|
||||
@media only screen and (max-width: 800px) {
|
||||
#scrollable {
|
||||
scrollbar-width: thin;
|
||||
max-width: 90px;
|
||||
max-height: 100%;
|
||||
margin: 0;
|
||||
|
@ -44,3 +53,10 @@ tr:nth-child(odd) {
|
|||
background-color: var(--table-odd-row-bg-color);
|
||||
font-size: var(--table-row-font-size);
|
||||
}
|
||||
tr:hover {
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
|
||||
tr.selected {
|
||||
background-color: #d1ecf1;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
function queryConstructor(customFilters = {}) {
|
||||
let filters = document.querySelectorAll(".filter");
|
||||
let queryObject = {};
|
||||
|
||||
Object.assign(queryObject, customFilters);
|
||||
|
||||
filters.forEach((filter) => {
|
||||
const filterId = filter.getAttribute("idFilter");
|
||||
const filterValue = filter.value;
|
||||
queryObject[filterId] = filterValue;
|
||||
});
|
||||
|
||||
let queryString = `query=${JSON.stringify(queryObject)}`;
|
||||
|
||||
return queryString;
|
||||
}
|
|
@ -0,0 +1,495 @@
|
|||
/*
|
||||
SortTable
|
||||
version 2
|
||||
7th April 2007
|
||||
Stuart Langridge, http://www.kryogenix.org/code/browser/sorttable/
|
||||
|
||||
Instructions:
|
||||
Download this file
|
||||
Add <script src="sorttable.js"></script> to your HTML
|
||||
Add class="sortable" to any table you'd like to make sortable
|
||||
Click on the headers to sort
|
||||
|
||||
Thanks to many, many people for contributions and suggestions.
|
||||
Licenced as X11: http://www.kryogenix.org/code/browser/licence.html
|
||||
This basically means: do what you want with it.
|
||||
*/
|
||||
|
||||
|
||||
var stIsIE = /*@cc_on!@*/false;
|
||||
|
||||
sorttable = {
|
||||
init: function() {
|
||||
// quit if this function has already been called
|
||||
if (arguments.callee.done) return;
|
||||
// flag this function so we don't do the same thing twice
|
||||
arguments.callee.done = true;
|
||||
// kill the timer
|
||||
if (_timer) clearInterval(_timer);
|
||||
|
||||
if (!document.createElement || !document.getElementsByTagName) return;
|
||||
|
||||
sorttable.DATE_RE = /^(\d\d?)[\/\.-](\d\d?)[\/\.-]((\d\d)?\d\d)$/;
|
||||
|
||||
forEach(document.getElementsByTagName('table'), function(table) {
|
||||
if (table.className.search(/\bsortable\b/) != -1) {
|
||||
sorttable.makeSortable(table);
|
||||
}
|
||||
});
|
||||
|
||||
},
|
||||
|
||||
makeSortable: function(table) {
|
||||
if (table.getElementsByTagName('thead').length == 0) {
|
||||
// table doesn't have a tHead. Since it should have, create one and
|
||||
// put the first table row in it.
|
||||
the = document.createElement('thead');
|
||||
the.appendChild(table.rows[0]);
|
||||
table.insertBefore(the,table.firstChild);
|
||||
}
|
||||
// Safari doesn't support table.tHead, sigh
|
||||
if (table.tHead == null) table.tHead = table.getElementsByTagName('thead')[0];
|
||||
|
||||
if (table.tHead.rows.length != 1) return; // can't cope with two header rows
|
||||
|
||||
// Sorttable v1 put rows with a class of "sortbottom" at the bottom (as
|
||||
// "total" rows, for example). This is B&R, since what you're supposed
|
||||
// to do is put them in a tfoot. So, if there are sortbottom rows,
|
||||
// for backwards compatibility, move them to tfoot (creating it if needed).
|
||||
sortbottomrows = [];
|
||||
for (var i=0; i<table.rows.length; i++) {
|
||||
if (table.rows[i].className.search(/\bsortbottom\b/) != -1) {
|
||||
sortbottomrows[sortbottomrows.length] = table.rows[i];
|
||||
}
|
||||
}
|
||||
if (sortbottomrows) {
|
||||
if (table.tFoot == null) {
|
||||
// table doesn't have a tfoot. Create one.
|
||||
tfo = document.createElement('tfoot');
|
||||
table.appendChild(tfo);
|
||||
}
|
||||
for (var i=0; i<sortbottomrows.length; i++) {
|
||||
tfo.appendChild(sortbottomrows[i]);
|
||||
}
|
||||
delete sortbottomrows;
|
||||
}
|
||||
|
||||
// work through each column and calculate its type
|
||||
headrow = table.tHead.rows[0].cells;
|
||||
for (var i=0; i<headrow.length; i++) {
|
||||
// manually override the type with a sorttable_type attribute
|
||||
if (!headrow[i].className.match(/\bsorttable_nosort\b/)) { // skip this col
|
||||
mtch = headrow[i].className.match(/\bsorttable_([a-z0-9]+)\b/);
|
||||
if (mtch) { override = mtch[1]; }
|
||||
if (mtch && typeof sorttable["sort_"+override] == 'function') {
|
||||
headrow[i].sorttable_sortfunction = sorttable["sort_"+override];
|
||||
} else {
|
||||
headrow[i].sorttable_sortfunction = sorttable.guessType(table,i);
|
||||
}
|
||||
// make it clickable to sort
|
||||
headrow[i].sorttable_columnindex = i;
|
||||
headrow[i].sorttable_tbody = table.tBodies[0];
|
||||
dean_addEvent(headrow[i],"click", sorttable.innerSortFunction = function(e) {
|
||||
|
||||
if (this.className.search(/\bsorttable_sorted\b/) != -1) {
|
||||
// if we're already sorted by this column, just
|
||||
// reverse the table, which is quicker
|
||||
sorttable.reverse(this.sorttable_tbody);
|
||||
this.className = this.className.replace('sorttable_sorted',
|
||||
'sorttable_sorted_reverse');
|
||||
this.removeChild(document.getElementById('sorttable_sortfwdind'));
|
||||
sortrevind = document.createElement('span');
|
||||
sortrevind.id = "sorttable_sortrevind";
|
||||
sortrevind.innerHTML = stIsIE ? ' <font face="webdings">5</font>' : ' ▴';
|
||||
this.appendChild(sortrevind);
|
||||
return;
|
||||
}
|
||||
if (this.className.search(/\bsorttable_sorted_reverse\b/) != -1) {
|
||||
// if we're already sorted by this column in reverse, just
|
||||
// re-reverse the table, which is quicker
|
||||
sorttable.reverse(this.sorttable_tbody);
|
||||
this.className = this.className.replace('sorttable_sorted_reverse',
|
||||
'sorttable_sorted');
|
||||
this.removeChild(document.getElementById('sorttable_sortrevind'));
|
||||
sortfwdind = document.createElement('span');
|
||||
sortfwdind.id = "sorttable_sortfwdind";
|
||||
sortfwdind.innerHTML = stIsIE ? ' <font face="webdings">6</font>' : ' ▾';
|
||||
this.appendChild(sortfwdind);
|
||||
return;
|
||||
}
|
||||
|
||||
// remove sorttable_sorted classes
|
||||
theadrow = this.parentNode;
|
||||
forEach(theadrow.childNodes, function(cell) {
|
||||
if (cell.nodeType == 1) { // an element
|
||||
cell.className = cell.className.replace('sorttable_sorted_reverse','');
|
||||
cell.className = cell.className.replace('sorttable_sorted','');
|
||||
}
|
||||
});
|
||||
sortfwdind = document.getElementById('sorttable_sortfwdind');
|
||||
if (sortfwdind) { sortfwdind.parentNode.removeChild(sortfwdind); }
|
||||
sortrevind = document.getElementById('sorttable_sortrevind');
|
||||
if (sortrevind) { sortrevind.parentNode.removeChild(sortrevind); }
|
||||
|
||||
this.className += ' sorttable_sorted';
|
||||
sortfwdind = document.createElement('span');
|
||||
sortfwdind.id = "sorttable_sortfwdind";
|
||||
sortfwdind.innerHTML = stIsIE ? ' <font face="webdings">6</font>' : ' ▾';
|
||||
this.appendChild(sortfwdind);
|
||||
|
||||
// build an array to sort. This is a Schwartzian transform thing,
|
||||
// i.e., we "decorate" each row with the actual sort key,
|
||||
// sort based on the sort keys, and then put the rows back in order
|
||||
// which is a lot faster because you only do getInnerText once per row
|
||||
row_array = [];
|
||||
col = this.sorttable_columnindex;
|
||||
rows = this.sorttable_tbody.rows;
|
||||
for (var j=0; j<rows.length; j++) {
|
||||
row_array[row_array.length] = [sorttable.getInnerText(rows[j].cells[col]), rows[j]];
|
||||
}
|
||||
/* If you want a stable sort, uncomment the following line */
|
||||
//sorttable.shaker_sort(row_array, this.sorttable_sortfunction);
|
||||
/* and comment out this one */
|
||||
row_array.sort(this.sorttable_sortfunction);
|
||||
|
||||
tb = this.sorttable_tbody;
|
||||
for (var j=0; j<row_array.length; j++) {
|
||||
tb.appendChild(row_array[j][1]);
|
||||
}
|
||||
|
||||
delete row_array;
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
guessType: function(table, column) {
|
||||
// guess the type of a column based on its first non-blank row
|
||||
sortfn = sorttable.sort_alpha;
|
||||
for (var i=0; i<table.tBodies[0].rows.length; i++) {
|
||||
text = sorttable.getInnerText(table.tBodies[0].rows[i].cells[column]);
|
||||
if (text != '') {
|
||||
if (text.match(/^-?[£$¤]?[\d,.]+%?$/)) {
|
||||
return sorttable.sort_numeric;
|
||||
}
|
||||
// check for a date: dd/mm/yyyy or dd/mm/yy
|
||||
// can have / or . or - as separator
|
||||
// can be mm/dd as well
|
||||
possdate = text.match(sorttable.DATE_RE)
|
||||
if (possdate) {
|
||||
// looks like a date
|
||||
first = parseInt(possdate[1]);
|
||||
second = parseInt(possdate[2]);
|
||||
if (first > 12) {
|
||||
// definitely dd/mm
|
||||
return sorttable.sort_ddmm;
|
||||
} else if (second > 12) {
|
||||
return sorttable.sort_mmdd;
|
||||
} else {
|
||||
// looks like a date, but we can't tell which, so assume
|
||||
// that it's dd/mm (English imperialism!) and keep looking
|
||||
sortfn = sorttable.sort_ddmm;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return sortfn;
|
||||
},
|
||||
|
||||
getInnerText: function(node) {
|
||||
// gets the text we want to use for sorting for a cell.
|
||||
// strips leading and trailing whitespace.
|
||||
// this is *not* a generic getInnerText function; it's special to sorttable.
|
||||
// for example, you can override the cell text with a customkey attribute.
|
||||
// it also gets .value for <input> fields.
|
||||
|
||||
if (!node) return "";
|
||||
|
||||
hasInputs = (typeof node.getElementsByTagName == 'function') &&
|
||||
node.getElementsByTagName('input').length;
|
||||
|
||||
if (node.getAttribute("sorttable_customkey") != null) {
|
||||
return node.getAttribute("sorttable_customkey");
|
||||
}
|
||||
else if (typeof node.textContent != 'undefined' && !hasInputs) {
|
||||
return node.textContent.replace(/^\s+|\s+$/g, '');
|
||||
}
|
||||
else if (typeof node.innerText != 'undefined' && !hasInputs) {
|
||||
return node.innerText.replace(/^\s+|\s+$/g, '');
|
||||
}
|
||||
else if (typeof node.text != 'undefined' && !hasInputs) {
|
||||
return node.text.replace(/^\s+|\s+$/g, '');
|
||||
}
|
||||
else {
|
||||
switch (node.nodeType) {
|
||||
case 3:
|
||||
if (node.nodeName.toLowerCase() == 'input') {
|
||||
return node.value.replace(/^\s+|\s+$/g, '');
|
||||
}
|
||||
case 4:
|
||||
return node.nodeValue.replace(/^\s+|\s+$/g, '');
|
||||
break;
|
||||
case 1:
|
||||
case 11:
|
||||
var innerText = '';
|
||||
for (var i = 0; i < node.childNodes.length; i++) {
|
||||
innerText += sorttable.getInnerText(node.childNodes[i]);
|
||||
}
|
||||
return innerText.replace(/^\s+|\s+$/g, '');
|
||||
break;
|
||||
default:
|
||||
return '';
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
reverse: function(tbody) {
|
||||
// reverse the rows in a tbody
|
||||
newrows = [];
|
||||
for (var i=0; i<tbody.rows.length; i++) {
|
||||
newrows[newrows.length] = tbody.rows[i];
|
||||
}
|
||||
for (var i=newrows.length-1; i>=0; i--) {
|
||||
tbody.appendChild(newrows[i]);
|
||||
}
|
||||
delete newrows;
|
||||
},
|
||||
|
||||
/* sort functions
|
||||
each sort function takes two parameters, a and b
|
||||
you are comparing a[0] and b[0] */
|
||||
sort_numeric: function(a,b) {
|
||||
aa = parseFloat(a[0].replace(/[^0-9.-]/g,''));
|
||||
if (isNaN(aa)) aa = 0;
|
||||
bb = parseFloat(b[0].replace(/[^0-9.-]/g,''));
|
||||
if (isNaN(bb)) bb = 0;
|
||||
return aa-bb;
|
||||
},
|
||||
sort_alpha: function(a,b) {
|
||||
if (a[0]==b[0]) return 0;
|
||||
if (a[0]<b[0]) return -1;
|
||||
return 1;
|
||||
},
|
||||
sort_ddmm: function(a,b) {
|
||||
mtch = a[0].match(sorttable.DATE_RE);
|
||||
y = mtch[3]; m = mtch[2]; d = mtch[1];
|
||||
if (m.length == 1) m = '0'+m;
|
||||
if (d.length == 1) d = '0'+d;
|
||||
dt1 = y+m+d;
|
||||
mtch = b[0].match(sorttable.DATE_RE);
|
||||
y = mtch[3]; m = mtch[2]; d = mtch[1];
|
||||
if (m.length == 1) m = '0'+m;
|
||||
if (d.length == 1) d = '0'+d;
|
||||
dt2 = y+m+d;
|
||||
if (dt1==dt2) return 0;
|
||||
if (dt1<dt2) return -1;
|
||||
return 1;
|
||||
},
|
||||
sort_mmdd: function(a,b) {
|
||||
mtch = a[0].match(sorttable.DATE_RE);
|
||||
y = mtch[3]; d = mtch[2]; m = mtch[1];
|
||||
if (m.length == 1) m = '0'+m;
|
||||
if (d.length == 1) d = '0'+d;
|
||||
dt1 = y+m+d;
|
||||
mtch = b[0].match(sorttable.DATE_RE);
|
||||
y = mtch[3]; d = mtch[2]; m = mtch[1];
|
||||
if (m.length == 1) m = '0'+m;
|
||||
if (d.length == 1) d = '0'+d;
|
||||
dt2 = y+m+d;
|
||||
if (dt1==dt2) return 0;
|
||||
if (dt1<dt2) return -1;
|
||||
return 1;
|
||||
},
|
||||
|
||||
shaker_sort: function(list, comp_func) {
|
||||
// A stable sort function to allow multi-level sorting of data
|
||||
// see: http://en.wikipedia.org/wiki/Cocktail_sort
|
||||
// thanks to Joseph Nahmias
|
||||
var b = 0;
|
||||
var t = list.length - 1;
|
||||
var swap = true;
|
||||
|
||||
while(swap) {
|
||||
swap = false;
|
||||
for(var i = b; i < t; ++i) {
|
||||
if ( comp_func(list[i], list[i+1]) > 0 ) {
|
||||
var q = list[i]; list[i] = list[i+1]; list[i+1] = q;
|
||||
swap = true;
|
||||
}
|
||||
} // for
|
||||
t--;
|
||||
|
||||
if (!swap) break;
|
||||
|
||||
for(var i = t; i > b; --i) {
|
||||
if ( comp_func(list[i], list[i-1]) < 0 ) {
|
||||
var q = list[i]; list[i] = list[i-1]; list[i-1] = q;
|
||||
swap = true;
|
||||
}
|
||||
} // for
|
||||
b++;
|
||||
|
||||
} // while(swap)
|
||||
}
|
||||
}
|
||||
|
||||
/* ******************************************************************
|
||||
Supporting functions: bundled here to avoid depending on a library
|
||||
****************************************************************** */
|
||||
|
||||
// Dean Edwards/Matthias Miller/John Resig
|
||||
|
||||
/* for Mozilla/Opera9 */
|
||||
if (document.addEventListener) {
|
||||
document.addEventListener("DOMContentLoaded", sorttable.init, false);
|
||||
}
|
||||
|
||||
/* for Internet Explorer */
|
||||
/*@cc_on @*/
|
||||
/*@if (@_win32)
|
||||
document.write("<script id=__ie_onload defer src=javascript:void(0)><\/script>");
|
||||
var script = document.getElementById("__ie_onload");
|
||||
script.onreadystatechange = function() {
|
||||
if (this.readyState == "complete") {
|
||||
sorttable.init(); // call the onload handler
|
||||
}
|
||||
};
|
||||
/*@end @*/
|
||||
|
||||
/* for Safari */
|
||||
if (/WebKit/i.test(navigator.userAgent)) { // sniff
|
||||
var _timer = setInterval(function() {
|
||||
if (/loaded|complete/.test(document.readyState)) {
|
||||
sorttable.init(); // call the onload handler
|
||||
}
|
||||
}, 10);
|
||||
}
|
||||
|
||||
/* for other browsers */
|
||||
window.onload = sorttable.init;
|
||||
|
||||
// written by Dean Edwards, 2005
|
||||
// with input from Tino Zijdel, Matthias Miller, Diego Perini
|
||||
|
||||
// http://dean.edwards.name/weblog/2005/10/add-event/
|
||||
|
||||
function dean_addEvent(element, type, handler) {
|
||||
if (element.addEventListener) {
|
||||
element.addEventListener(type, handler, false);
|
||||
} else {
|
||||
// assign each event handler a unique ID
|
||||
if (!handler.$$guid) handler.$$guid = dean_addEvent.guid++;
|
||||
// create a hash table of event types for the element
|
||||
if (!element.events) element.events = {};
|
||||
// create a hash table of event handlers for each element/event pair
|
||||
var handlers = element.events[type];
|
||||
if (!handlers) {
|
||||
handlers = element.events[type] = {};
|
||||
// store the existing event handler (if there is one)
|
||||
if (element["on" + type]) {
|
||||
handlers[0] = element["on" + type];
|
||||
}
|
||||
}
|
||||
// store the event handler in the hash table
|
||||
handlers[handler.$$guid] = handler;
|
||||
// assign a global event handler to do all the work
|
||||
element["on" + type] = handleEvent;
|
||||
}
|
||||
};
|
||||
// a counter used to create unique IDs
|
||||
dean_addEvent.guid = 1;
|
||||
|
||||
function removeEvent(element, type, handler) {
|
||||
if (element.removeEventListener) {
|
||||
element.removeEventListener(type, handler, false);
|
||||
} else {
|
||||
// delete the event handler from the hash table
|
||||
if (element.events && element.events[type]) {
|
||||
delete element.events[type][handler.$$guid];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function handleEvent(event) {
|
||||
var returnValue = true;
|
||||
// grab the event object (IE uses a global event object)
|
||||
event = event || fixEvent(((this.ownerDocument || this.document || this).parentWindow || window).event);
|
||||
// get a reference to the hash table of event handlers
|
||||
var handlers = this.events[event.type];
|
||||
// execute each event handler
|
||||
for (var i in handlers) {
|
||||
this.$$handleEvent = handlers[i];
|
||||
if (this.$$handleEvent(event) === false) {
|
||||
returnValue = false;
|
||||
}
|
||||
}
|
||||
return returnValue;
|
||||
};
|
||||
|
||||
function fixEvent(event) {
|
||||
// add W3C standard event methods
|
||||
event.preventDefault = fixEvent.preventDefault;
|
||||
event.stopPropagation = fixEvent.stopPropagation;
|
||||
return event;
|
||||
};
|
||||
fixEvent.preventDefault = function() {
|
||||
this.returnValue = false;
|
||||
};
|
||||
fixEvent.stopPropagation = function() {
|
||||
this.cancelBubble = true;
|
||||
}
|
||||
|
||||
// Dean's forEach: http://dean.edwards.name/base/forEach.js
|
||||
/*
|
||||
forEach, version 1.0
|
||||
Copyright 2006, Dean Edwards
|
||||
License: http://www.opensource.org/licenses/mit-license.php
|
||||
*/
|
||||
|
||||
// array-like enumeration
|
||||
if (!Array.forEach) { // mozilla already supports this
|
||||
Array.forEach = function(array, block, context) {
|
||||
for (var i = 0; i < array.length; i++) {
|
||||
block.call(context, array[i], i, array);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// generic enumeration
|
||||
Function.prototype.forEach = function(object, block, context) {
|
||||
for (var key in object) {
|
||||
if (typeof this.prototype[key] == "undefined") {
|
||||
block.call(context, object[key], key, object);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// character enumeration
|
||||
String.forEach = function(string, block, context) {
|
||||
Array.forEach(string.split(""), function(chr, index) {
|
||||
block.call(context, chr, index, string);
|
||||
});
|
||||
};
|
||||
|
||||
// globally resolve forEach enumeration
|
||||
var forEach = function(object, block, context) {
|
||||
if (object) {
|
||||
var resolve = Object; // default
|
||||
if (object instanceof Function) {
|
||||
// functions have a "length" property
|
||||
resolve = Function;
|
||||
} else if (object.forEach instanceof Function) {
|
||||
// the object implements a custom forEach method so use that
|
||||
object.forEach(block, context);
|
||||
return;
|
||||
} else if (typeof object == "string") {
|
||||
// the object is a string
|
||||
resolve = String;
|
||||
} else if (typeof object.length == "number") {
|
||||
// the object is array-like
|
||||
resolve = Array;
|
||||
}
|
||||
resolve.forEach(object, block, context);
|
||||
}
|
||||
};
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
maplibregl.addProtocol("cog", MaplibreCOGProtocol.cogProtocol);
|
||||
|
||||
map.on("load", () => {
|
||||
map.addSource("imageSource", {
|
||||
type: "raster",
|
||||
url: `cog://http://localhost:5000/cog?year=${year}&pid=${pid}`,
|
||||
tileSize: 256,
|
||||
minzoom: 0,
|
||||
});
|
||||
|
||||
map.addLayer({
|
||||
id: "imageLayer",
|
||||
source: "imageSource",
|
||||
type: "raster",
|
||||
});
|
||||
});
|