grafana-loki, grafana-alloy

Why does Grafana Loki not persist old logs?


I'm running the Docker containers grafana/loki:3.4.2 and grafana/alloy:latest locally. I'm trying to upload logs that are roughly 50 hours old into Loki in order to run some tests. However, Loki does not seem to persist any of these old logs (it does persist logs newer than 24 hours). Is there any way to upload old logs into Loki and query them?

Timestamp parsing in Grafana Alloy looks correct during live debugging, and Grafana Loki successfully receives the logs from Alloy (a "push request parsed" message is present in Loki's logs).

GET http://localhost:3100/loki/api/v1/labels returns the labels correctly, while GET http://localhost:3100/loki/api/v1/series returns an empty response.
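Both endpoints also accept explicit start and end parameters (Unix epoch nanoseconds) and, as far as I can tell, default to only a recent time window, so a check that actually covers the 50-hour-old data needs something like GET http://localhost:3100/loki/api/v1/series?start=<start_ns>&end=<end_ns> (placeholder timestamps).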

./docker.compose.yaml:

networks:
  loki:


volumes:
  alloy:
  loki:


services:
  loki:
    image: grafana/loki:3.4.2
    ports:
      - "3100:3100"
    command: -config.file=/etc/loki/local-config.yaml
    volumes:
      - './infrastructure/loki/local-config.yaml:/etc/loki/local-config.yaml:ro'
      - 'loki:/loki'
    networks:
      - loki
  alloy:
    image: grafana/alloy:latest
    ports:
      - "12345:12345"
    volumes:
      - './logs:/var/log/test:ro'
      - 'alloy:/var/lib/alloy/data'
      - './infrastructure/alloy/config.alloy:/etc/alloy/config.alloy:ro'
    command: run --server.http.listen-addr=0.0.0.0:12345 --storage.path=/var/lib/alloy/data /etc/alloy/config.alloy
    networks:
      - loki

./infrastructure/alloy/config.alloy

logging {
  level  = "debug"
  format = "json"
}
livedebugging {
  enabled = true
}
local.file_match "local_files" {
    path_targets = [{"__path__" = "/var/log/test/**/*.jsonl"}]
    sync_period = "5s"
}
loki.source.file "log_scrape" {
  targets    = local.file_match.local_files.targets
  forward_to = [loki.process.parse_logs.receiver]
  tail_from_end = false
}
loki.process "parse_logs" {
  forward_to = [loki.relabel.add_static_label.receiver]
  stage.json {
    expressions = {
      timestamp = "\"@t\"", 
      level = "\"@l\"",
      application = "\"@a\"",
    }
  }
  stage.timestamp {
    source = "timestamp"
    format = "RFC3339"
  }
  stage.labels {
    values = {
      level = "",
      application = "",
    }
  }
}
loki.relabel "add_static_label" {
    forward_to = [loki.write.grafana_loki.receiver]
    rule {
        target_label = "environment"
        replacement  = "dev"
    }
    rule {
        target_label = "system"
        replacement  = "main"
    }
}
loki.write "grafana_loki" {
  endpoint {
    url = "http://loki:3100/loki/api/v1/push"
  }
}

./infrastructure/loki/local-config.yaml

auth_enabled: false

server:
  http_listen_port: 3100
  log_level: debug

common:
  instance_addr: 127.0.0.1
  path_prefix: /loki
  storage:
    filesystem:
      chunks_directory: /loki/chunks
      rules_directory: /loki/rules
  replication_factor: 1
  ring:
    kvstore:
      store: inmemory

compactor:
  working_directory: /loki/compactor
  compaction_interval: 1m
  retention_enabled: true
  retention_delete_delay: 1m
  retention_delete_worker_count: 10
  delete_request_store: filesystem

limits_config:
  retention_period: 8760h
  reject_old_samples: false
  reject_old_samples_max_age: 8760h

schema_config:
  configs:
    - from: 2020-10-24
      store: tsdb
      object_store: filesystem
      schema: v13
      index:
        prefix: index_
        period: 24h

ruler:
  alertmanager_url: ""

analytics:
  reporting_enabled: false

./logs/test/log20250516.jsonl

{"@t":"2025-05-16T11:21:10.5111356+00:00","@l":"Information","@m":"Now listening on: \"http://[::]:8080\"","@i":"d826f4b8","address":"http://[::]:8080","EventId":{"Id":14,"Name":"ListeningOnAddress"},"SourceContext":"Microsoft.Hosting.Lifetime","@a":"test"}
{"@t":"2025-05-16T11:21:10.5123889+00:00","@l":"Information","@m":"Application started. Press Ctrl+C to shut down.","@i":"dcaefe54","SourceContext":"Microsoft.Hosting.Lifetime","@a":"test"}

Solution

  • Adding this section to the Loki configuration solved the issue:

    ingester:
      query_store_max_look_back_period: -1
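
  • As I understand it, this works because query_store_max_look_back_period controls how far back the ingester is allowed to query the long-term store when Loki runs as a single binary with the filesystem object store; the default effectively limits queries to recently ingested data, so older chunks are never returned, and -1 removes that limit.

  • To double-check that backdated entries are accepted and queryable end to end, independently of Alloy, one option is to push a single old entry straight to Loki's HTTP push API and read it back via query_range. Below is a minimal Python sketch (standard library only); it assumes the compose setup above with Loki exposed on localhost:3100 and uses a made-up job="backfill-test" label:

    import json
    import time
    import urllib.parse
    import urllib.request

    LOKI = "http://localhost:3100"  # port mapping from the compose file above

    # A timestamp roughly 50 hours in the past, as a Unix epoch in nanoseconds
    # (Loki's push API expects nanosecond timestamps as strings).
    ts_ns = time.time_ns() - 50 * 3600 * 10**9

    # Push one backdated line with a throwaway label set.
    payload = {
        "streams": [
            {
                "stream": {"job": "backfill-test", "environment": "dev"},
                "values": [[str(ts_ns), "backfill smoke test"]],
            }
        ]
    }
    req = urllib.request.Request(
        f"{LOKI}/loki/api/v1/push",
        data=json.dumps(payload).encode(),
        headers={"Content-Type": "application/json"},
    )
    urllib.request.urlopen(req)  # returns 204 No Content on success

    # Query the entry back over a window around the old timestamp.
    params = urllib.parse.urlencode({
        "query": '{job="backfill-test"}',
        "start": str(ts_ns - 60 * 10**9),
        "end": str(ts_ns + 60 * 10**9),
    })
    with urllib.request.urlopen(f"{LOKI}/loki/api/v1/query_range?{params}") as resp:
        result = json.load(resp)

    # Prints the matching streams; an empty result list means the backdated
    # entry is not queryable.
    print(json.dumps(result["data"]["result"], indent=2))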