otc-metadata-rework/tools/collect_statistics.py
#!/usr/bin/python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
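"""Collect per-URL page-view statistics from the Umami analytics API and
filter them against the OTC service catalogue provided by otc_metadata.

Note: the actual matching of stats against services in
filter_stats_by_existing_services() is still a TODO; for now the service
catalogue itself is written to the output file.
"""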
import argparse
import json
import os
from datetime import datetime, timedelta, timezone

import requests

import otc_metadata.services

# ===== Configuration =====
USERNAME = os.getenv("UMAMI_USERNAME")
PASSWORD = os.getenv("UMAMI_PASSWORD")
OUTPUT_FILE = "stats.json"


def parse_args():
    """Parse command-line arguments."""
    parser = argparse.ArgumentParser(description="Analytics Script")
    parser.add_argument(
        "--base-url",
        default="https://analytics.otc-service.com",
        help="Base URL of the analytics server"
    )
    parser.add_argument(
        "--environment",
        default=['public'],
        nargs='+',
        choices=['public', 'internal', 'hidden'],
        help="Environments (default: ['public'])"
    )
    parser.add_argument(
        "--cloud-environment",
        default="eu_de",
        choices=['eu_de', 'swiss'],
        help="Cloud environment (default: eu_de)"
    )
    parser.add_argument(
        "--website-id",
        required=True,
        help="Umami website ID"
    )
    return parser.parse_args()


def get_umami_token(base_url):
    """Get a Bearer token from the Umami API."""
    url = f"{base_url}/api/auth/login"
    response = requests.post(
        url, json={"username": USERNAME, "password": PASSWORD})
    response.raise_for_status()
    return response.json().get("token")


def get_4_weeks_range():
    """Calculate start and end of a 4-week range as UNIX timestamps (ms)."""
    # Use a timezone-aware datetime so .timestamp() is not skewed by the
    # local timezone of the machine running the script.
    end_date = datetime.now(timezone.utc)
    start_date = end_date - timedelta(weeks=4)
    start_ts = int(start_date.timestamp() * 1000)
    end_ts = int(end_date.timestamp() * 1000)
    return start_ts, end_ts


def fetch_pageviews(token, start_ts, end_ts, website_id, base_url):
    """Retrieve per-URL page-view metrics from the Umami API."""
    headers = {"Authorization": f"Bearer {token}"}
    url = f"{base_url}/api/websites/{website_id}/metrics"
    params = {
        "type": "url",
        "startAt": start_ts,
        "endAt": end_ts
    }
    response = requests.get(url, headers=headers, params=params)
    response.raise_for_status()
    return response.json()


def filter_stats_by_existing_services(
        stats, cloud_environment, environment):
    """Filter the stats down to services that actually exist."""
    metadata = otc_metadata.services.Services()
    services = metadata.all_services_by_cloud_environment(
        cloud_environment=cloud_environment,
        environments=environment)
    print(json.dumps(services))
    # TODO: match the stats entries against the service list;
    # for now the unfiltered service catalogue is returned.
    return services


def save_to_file(data, filename):
    """Save the collected data to a JSON file."""
    with open(filename, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=2, ensure_ascii=False)


def main():
    args = parse_args()
    token = get_umami_token(base_url=args.base_url)
    start_ts, end_ts = get_4_weeks_range()
    stats = fetch_pageviews(
        token,
        start_ts,
        end_ts,
        website_id=args.website_id,
        base_url=args.base_url)
    filtered_stats = filter_stats_by_existing_services(
        stats=stats,
        cloud_environment=args.cloud_environment,
        environment=args.environment
    )
    save_to_file(filtered_stats, OUTPUT_FILE)
    print(f"Statistics saved in {OUTPUT_FILE}")


if __name__ == "__main__":
    main()
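

# Example invocation (illustrative only; the website ID below is a placeholder
# for whatever UUID Umami assigns to the tracked documentation site, and
# UMAMI_USERNAME / UMAMI_PASSWORD must be exported in the environment):
#
#   UMAMI_USERNAME=... UMAMI_PASSWORD=... \
#   python tools/collect_statistics.py \
#       --website-id 01234567-89ab-cdef-0123-456789abcdef \
#       --environment public internal \
#       --cloud-environment eu_de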