8 changes: 4 additions & 4 deletions dockerfiles/microservices-dependencies.dockerfile
@@ -8,7 +8,7 @@ ARG KAFKAOPMONVERSION=v2.0.0 # For OpMonSubscriber.py from kafkaopmon
ARG LOCALPYDIR=/microservices_python

RUN yum clean all \
&& yum -y install gcc make git unzip libpq-devel libffi-devel python3-pip python3-wheel krb5-devel python3-devel \
&& yum -y install gcc make git unzip postgresql libpq-devel libffi-devel python3-pip python3-wheel krb5-devel python3-devel \
&& yum clean all

# Can drop when migration to sqlalchemy is complete since we're using the thin client there
@@ -24,7 +24,7 @@ RUN python3 -m pip install --upgrade setuptools && \
# elisa_client_api needed by the logbook microservice
RUN git clone https://github.com/DUNE-DAQ/elisa_client_api.git && \
python3 -m pip install --upgrade setuptools && \
python3 -m pip install ./elisa_client_api
python3 -m pip install ./elisa_client_api

# protoc-24.3-linux-x86_64.zip is the latest zipfile available as of Sep-15-2023
# See also https://grpc.io/docs/protoc-installation/#install-pre-compiled-binaries-any-os
@@ -36,12 +36,12 @@ RUN curl -LO https://github.com/protocolbuffers/protobuf/releases/download/v24.3
protoc --python_out=$LOCALPYDIR/ers issue.proto && \
curl -O https://raw.githubusercontent.com/DUNE-DAQ/opmonlib/$OPMONLIBVERSION/schema/opmonlib/opmon_entry.proto && \
mkdir -p $LOCALPYDIR/opmonlib && \
protoc --python_out=$LOCALPYDIR/opmonlib -I/ -I/include opmon_entry.proto
protoc --python_out=$LOCALPYDIR/opmonlib -I/ -I/include opmon_entry.proto

RUN mkdir -p $LOCALPYDIR/erskafka && \
curl https://raw.githubusercontent.com/DUNE-DAQ/erskafka/$ERSKAFKAVERSION/python/erskafka/ERSSubscriber.py -o $LOCALPYDIR/erskafka/ERSSubscriber.py && \
mkdir -p $LOCALPYDIR/kafkaopmon && \
curl https://raw.githubusercontent.com/DUNE-DAQ/kafkaopmon/$KAFKAOPMONVERSION/python/kafkaopmon/OpMonSubscriber.py -o $LOCALPYDIR/kafkaopmon/OpMonSubscriber.py
curl https://raw.githubusercontent.com/DUNE-DAQ/kafkaopmon/$KAFKAOPMONVERSION/python/kafkaopmon/OpMonSubscriber.py -o $LOCALPYDIR/kafkaopmon/OpMonSubscriber.py

ENV PYTHONPATH=$LOCALPYDIR:$PYTHONPATH

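The notable change in this Dockerfile is the addition of the `postgresql` package, which provides the `psql` client that the new partitioning scripts pipe their SQL into. A minimal sanity check might look like the sketch below; the image tag is a placeholder, and the second command assumes the protobuf runtime is installed elsewhere in the image (the `*_pb2` module names follow protoc's standard output naming for `issue.proto` and `opmon_entry.proto`):

```shell
# Image tag is a placeholder for whatever this Dockerfile is built as.
docker run --rm microservices-dependencies:latest psql --version

# Assumes the protobuf runtime is available in the image; the generated modules
# live under $LOCALPYDIR, which is on PYTHONPATH, so they import as namespace packages.
docker run --rm microservices-dependencies:latest \
    python3 -c "import ers.issue_pb2, opmonlib.opmon_entry_pb2"
```
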
7 changes: 7 additions & 0 deletions postgresql_partitions/README.md
@@ -0,0 +1,7 @@
These scripts expect that you've got `DATABASE_URI` defined in your environment.
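
For example, `DATABASE_URI` can be a standard libpq-style connection URI (all values here are placeholders):

```shell
export DATABASE_URI="postgresql://username:password@db.example.org:5432/mydb"
```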

You can make partitions with something like:

```shell
partitions_for_all_tables.sh | psql "${DATABASE_URI}"
```
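
Because the script only writes SQL to stdout, you can also capture it to a file and review it before applying it (a usage sketch; the file name is arbitrary):

```shell
./partitions_for_all_tables.sh > partitions.sql
less partitions.sql
psql "${DATABASE_URI}" -f partitions.sql
```
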
10 changes: 10 additions & 0 deletions postgresql_partitions/partitions_for_all_tables.sh
@@ -0,0 +1,10 @@
#!/bin/bash

# Simple script to loop through the tables found in any non-system namespace.
# Excludes tables that are themselves partitions, so you only get the "main" (parent) tables.

FIND_TABLES="SELECT c.relname AS table_name FROM pg_class c JOIN pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind IN ('r', 'p') AND n.nspname NOT IN ('pg_catalog', 'information_schema') AND c.relispartition = false ORDER BY c.relname;"

for table in $(echo "${FIND_TABLES}" | psql -t "${DATABASE_URI}"); do
  "$(dirname "$0")/sample_postgresql_partitions_for_table.sh" "${table}"
done
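
To preview which tables the loop will cover, the same catalog query stored in `FIND_TABLES` can be run by hand, e.g.:

```shell
psql -t "${DATABASE_URI}" -c \
  "SELECT c.relname AS table_name
   FROM pg_class c
   JOIN pg_namespace n ON n.oid = c.relnamespace
   WHERE c.relkind IN ('r', 'p')
     AND n.nspname NOT IN ('pg_catalog', 'information_schema')
     AND c.relispartition = false
   ORDER BY c.relname;"
```
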
13 changes: 3 additions & 10 deletions postgresql_partitions/sample_postgresql_partitions_for_table.sh
@@ -1,15 +1,5 @@
#!/bin/bash

#
# SELECT c.relname AS table_name
# FROM pg_class c
# JOIN pg_namespace n ON n.oid = c.relnamespace
# WHERE c.relkind IN ('r', 'p')
# AND n.nspname NOT IN ('pg_catalog', 'information_schema')
# AND c.relispartition = false
# ORDER BY c.relname;
#

TABLE_NAME="${1:-example}"
YEAR="${2:-$(date '+%Y')}"

@@ -59,6 +49,7 @@ start_week=${START_DATE}
week_number=0
previous_partition=""

echo "BEGIN TRANSACTION;"
for count in {1..60}; do
week_number=$((week_number + 1))
week_count_string=$(printf "%02d" ${week_number})
@@ -115,3 +106,5 @@ done

# Output final partition if valid
output_partition_if_valid "${previous_partition}"

echo "END TRANSACTION;"