diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..796603f
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,2 @@
+target
+.env
\ No newline at end of file
diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml
new file mode 100644
index 0000000..1027958
--- /dev/null
+++ b/.github/workflows/build-docker.yml
@@ -0,0 +1,47 @@
+name: Build and Test
+
+on:
+  workflow_dispatch:
+  push:
+    branches: [ main, actions ]
+
+env:
+  CARGO_TERM_COLOR: always
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Cache Docker layers
+        uses: actions/cache@v2
+        with:
+          path: /tmp/.buildx-cache
+          key: ${{ runner.os }}-buildx-${{ hashFiles('Cargo.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-buildx-
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          file: ./Dockerfile
+          platforms: ${{github.event.inputs.platforms}}
+          push: true
+          cache-from: type=local,src=/tmp/.buildx-cache
+          cache-to: type=local,dest=/tmp/.buildx-cache
+          tags: |
+            trivernis/tobi:latest
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..fa0dc9d
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,24 @@
+# syntax=docker/dockerfile:1.0-experimental
+FROM rust:latest AS builder
+RUN apt-get update
+RUN apt-get install -y build-essential libssl-dev libopus-dev libpq-dev
+WORKDIR /usr/src
+RUN USER=root cargo new tobi
+WORKDIR /usr/src/tobi
+COPY Cargo.toml Cargo.lock ./
+COPY src ./src
+COPY bot-coreutils ./bot-coreutils
+COPY bot-database ./bot-database
+COPY bot-serenityutils ./bot-serenityutils
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+    --mount=type=cache,target=target \
+    cargo build --release
+RUN mkdir /tmp/tobi
+RUN --mount=type=cache,target=target cp target/release/tobi-rs /tmp/tobi/
+
+FROM bitnami/minideb:latest
+RUN install_packages openssl libopus0 ffmpeg python3 python3-pip libpq5
+RUN pip3 install youtube-dl
+RUN rm -rf /var/lib/{apt,dpkg,cache,log}/
+COPY --from=builder /tmp/tobi/tobi-rs .
+ENTRYPOINT ["/tobi-rs"]
\ No newline at end of file
diff --git a/bot-database/src/lib.rs b/bot-database/src/lib.rs
index 831a1c2..364a7ab 100644
--- a/bot-database/src/lib.rs
+++ b/bot-database/src/lib.rs
@@ -4,9 +4,9 @@ extern crate diesel;
 #[macro_use]
 extern crate diesel_migrations;
 
-use crate::error::DatabaseResult;
+use crate::error::{DatabaseError, DatabaseResult};
 use diesel::prelude::*;
-use diesel::r2d2::{ConnectionManager, Pool};
+use diesel::r2d2::{ConnectionManager, ManageConnection, Pool};
 use std::env;
 
 pub mod database;
@@ -22,11 +22,16 @@ type PoolConnection = Pool<ConnectionManager<PgConnection>>;
 embed_migrations!("../bot-database/migrations");
 
 fn get_connection() -> DatabaseResult<PoolConnection> {
-    dotenv::dotenv()?;
     let database_url = env::var("DATABASE_URL").expect("No DATABASE_URL in path");
     log::debug!("Establishing database connection...");
     let manager = ConnectionManager::<PgConnection>::new(database_url);
+    log::trace!("Connecting...");
+    manager
+        .connect()
+        .map_err(|e| DatabaseError::Msg(format!("{:?}", e)))?;
+    log::trace!("Creating pool...");
     let pool = Pool::builder().max_size(16).build(manager)?;
+    log::trace!("Getting one connection to run migrations...");
     let connection = pool.get()?;
     log::debug!("Running migrations...");
     embedded_migrations::run(&connection)?;
diff --git a/src/main.rs b/src/main.rs
index 946cf23..8e9fd01 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -15,8 +15,12 @@ pub static VERSION: &str = env!("CARGO_PKG_VERSION");
 
 #[tokio::main]
 async fn main() {
+    let _ = dotenv::dotenv();
     init_logger();
-    let mut client = get_client().await.unwrap();
+    let mut client = get_client()
+        .await
+        .map_err(|e| log::error!("Failed to get client: {:?}", e))
+        .expect("Failed to get client");
 
     // start listening for events by starting a single shard
     if let Err(why) = client.start_autosharded().await {
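
Note: the "Build and push" step passes platforms: ${{github.event.inputs.platforms}}, but the workflow_dispatch trigger above declares no inputs, so the expression resolves to an empty string (buildx then falls back to the runner's native platform). A minimal sketch of how such an input could be declared, assuming a single-platform default; the description text and default value are illustrative, not part of this diff:

on:
  workflow_dispatch:
    inputs:
      platforms:
        # Assumed wording and default; only the input name comes from the workflow above.
        description: "Comma-separated list of buildx target platforms"
        required: false
        default: "linux/amd64"
  push:
    branches: [ main, actions ]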