Repository: pintsized/ledge
Branch: master
Commit: 2a872096e73c
Files: 94
Total size: 733.6 KB
Directory structure:
gitextract_rst1uaiw/
├── .gitattributes
├── .github/
│ └── FUNDING.yml
├── .gitignore
├── .luacheckrc
├── .luacov
├── .travis.yml
├── Makefile
├── README.md
├── dist.ini
├── docker/
│ └── tests/
│ └── docker-compose.yml
├── lib/
│ ├── ledge/
│ │ ├── background.lua
│ │ ├── cache_key.lua
│ │ ├── collapse.lua
│ │ ├── esi/
│ │ │ ├── processor_1_0.lua
│ │ │ └── tag_parser.lua
│ │ ├── esi.lua
│ │ ├── gzip.lua
│ │ ├── handler.lua
│ │ ├── header_util.lua
│ │ ├── jobs/
│ │ │ ├── collect_entity.lua
│ │ │ ├── purge.lua
│ │ │ └── revalidate.lua
│ │ ├── purge.lua
│ │ ├── range.lua
│ │ ├── request.lua
│ │ ├── response.lua
│ │ ├── stale.lua
│ │ ├── state_machine/
│ │ │ ├── actions.lua
│ │ │ ├── events.lua
│ │ │ ├── pre_transitions.lua
│ │ │ └── states.lua
│ │ ├── state_machine.lua
│ │ ├── storage/
│ │ │ └── redis.lua
│ │ ├── util.lua
│ │ ├── validation.lua
│ │ └── worker.lua
│ └── ledge.lua
├── migrations/
│ └── 1.26-1.27.lua
├── t/
│ ├── 01-unit/
│ │ ├── cache_key.t
│ │ ├── esi.t
│ │ ├── events.t
│ │ ├── handler.t
│ │ ├── jobs.t
│ │ ├── ledge.t
│ │ ├── processor_1_0.t
│ │ ├── purge.t
│ │ ├── range.t
│ │ ├── request.t
│ │ ├── response.t
│ │ ├── stale.t
│ │ ├── state_machine.t
│ │ ├── storage.t
│ │ ├── tag_parser.t
│ │ ├── util.t
│ │ ├── validation.t
│ │ └── worker.t
│ ├── 02-integration/
│ │ ├── age.t
│ │ ├── cache.t
│ │ ├── collapsed_forwarding.t
│ │ ├── esi.t
│ │ ├── events.t
│ │ ├── gc.t
│ │ ├── gzip.t
│ │ ├── hop_by_hop_headers.t
│ │ ├── max-stale.t
│ │ ├── max_size.t
│ │ ├── memory_pressure.t
│ │ ├── multiple_headers.t
│ │ ├── on_abort.t
│ │ ├── origin_mode.t
│ │ ├── purge.t
│ │ ├── range.t
│ │ ├── req_body.t
│ │ ├── req_method.t
│ │ ├── request_leak.t
│ │ ├── response.t
│ │ ├── ssl.t
│ │ ├── stale-if-error.t
│ │ ├── stale-while-revalidate.t
│ │ ├── upstream.t
│ │ ├── upstream_client.t
│ │ ├── validation.t
│ │ ├── vary.t
│ │ └── via_header.t
│ ├── 03-sentinel/
│ │ ├── 01-master_up.t
│ │ ├── 02-master_down.t
│ │ └── 03-slave_promoted.t
│ ├── LedgeEnv.pm
│ └── cert/
│ ├── example.com.crt
│ ├── example.com.key
│ ├── rootCA.key
│ ├── rootCA.pem
│ └── rootCA.srl
└── util/
└── lua-releng
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitattributes
================================================
*.t linguist-language=lua
================================================
FILE: .github/FUNDING.yml
================================================
github: pintsized
================================================
FILE: .gitignore
================================================
t/servroot/
t/error.log
dump.rdb
stdout
luacov.*
*.src.rock
================================================
FILE: .luacheckrc
================================================
std = "ngx_lua"
redefined = false
================================================
FILE: .luacov
================================================
modules = {
["ledge"] = "lib/ledge.lua",
["ledge.esi.*"] = "lib/",
["ledge.jobs.*"] = "lib/",
["ledge.state_machine.*"] = "lib/",
["ledge.storage.*"] = "lib/",
["ledge.*"] = "lib/"
}
================================================
FILE: .travis.yml
================================================
services:
- docker
script:
- cd docker/tests
- docker-compose run --rm runner
================================================
FILE: Makefile
================================================
SHELL := /bin/bash # Cheat by using bash :)
OPENRESTY_PREFIX = /usr/local/openresty
TEST_FILE ?= t/01-unit t/02-integration
SENTINEL_TEST_FILE ?= t/03-sentinel
TEST_LEDGE_REDIS_HOST ?= 127.0.0.1
TEST_LEDGE_REDIS_PORT ?= 6379
TEST_LEDGE_REDIS_DATABASE ?= 2
TEST_LEDGE_REDIS_QLESS_DATABASE ?= 3
TEST_NGINX_HOST ?= 127.0.0.1
# Command line arguments for ledge tests
TEST_LEDGE_REDIS_VARS = PATH=$(OPENRESTY_PREFIX)/nginx/sbin:$(PATH) \
TEST_LEDGE_REDIS_HOST=$(TEST_LEDGE_REDIS_HOST) \
TEST_LEDGE_REDIS_PORT=$(TEST_LEDGE_REDIS_PORT) \
TEST_LEDGE_REDIS_SOCKET=unix://$(TEST_LEDGE_REDIS_SOCKET) \
TEST_LEDGE_REDIS_DATABASE=$(TEST_LEDGE_REDIS_DATABASE) \
TEST_LEDGE_REDIS_QLESS_DATABASE=$(TEST_LEDGE_REDIS_QLESS_DATABASE) \
TEST_NGINX_HOST=$(TEST_NGINX_HOST) \
TEST_NGINX_NO_SHUFFLE=1
REDIS_CLI := redis-cli -h $(TEST_LEDGE_REDIS_HOST) -p $(TEST_LEDGE_REDIS_PORT)
###############################################################################
# Deprecated, use docker compose to run Redis instead
###############################################################################
REDIS_CMD = redis-server
SENTINEL_CMD = $(REDIS_CMD) --sentinel
REDIS_SOCK = /redis.sock
REDIS_PID = /redis.pid
REDIS_LOG = /redis.log
REDIS_PREFIX = /tmp/redis-
# Overrideable ledge test variables
TEST_LEDGE_REDIS_PORTS ?= 6379 6380
REDIS_FIRST_PORT := $(firstword $(TEST_LEDGE_REDIS_PORTS))
REDIS_SLAVE_ARG := --slaveof 127.0.0.1 $(REDIS_FIRST_PORT)
# Override ledge socket for running make test on its own
# (make test TEST_LEDGE_REDIS_SOCKET=/path/to/sock.sock)
TEST_LEDGE_REDIS_SOCKET ?= $(REDIS_PREFIX)$(REDIS_FIRST_PORT)$(REDIS_SOCK)
# Overrideable ledge + sentinel test variables
TEST_LEDGE_SENTINEL_PORTS ?= 26379 26380 26381
TEST_LEDGE_SENTINEL_MASTER_NAME ?= mymaster
TEST_LEDGE_SENTINEL_PROMOTION_TIME ?= 20
# Command line arguments for ledge + sentinel tests
TEST_LEDGE_SENTINEL_VARS = PATH=$(OPENRESTY_PREFIX)/nginx/sbin:$(PATH) \
TEST_LEDGE_SENTINEL_PORT=$(firstword $(TEST_LEDGE_SENTINEL_PORTS)) \
TEST_LEDGE_SENTINEL_MASTER_NAME=$(TEST_LEDGE_SENTINEL_MASTER_NAME) \
TEST_LEDGE_REDIS_DATABASE=$(TEST_LEDGE_REDIS_DATABASE) \
TEST_NGINX_NO_SHUFFLE=1
# Sentinel configuration can only be set by a config file
define TEST_LEDGE_SENTINEL_CONFIG
sentinel monitor $(TEST_LEDGE_SENTINEL_MASTER_NAME) 127.0.0.1 $(REDIS_FIRST_PORT) 2
sentinel down-after-milliseconds $(TEST_LEDGE_SENTINEL_MASTER_NAME) 2000
sentinel failover-timeout $(TEST_LEDGE_SENTINEL_MASTER_NAME) 10000
sentinel parallel-syncs $(TEST_LEDGE_SENTINEL_MASTER_NAME) 5
endef
export TEST_LEDGE_SENTINEL_CONFIG
SENTINEL_CONFIG_PREFIX = /tmp/sentinel
###############################################################################
PREFIX ?= /usr/local
LUA_INCLUDE_DIR ?= $(PREFIX)/include
LUA_LIB_DIR ?= $(PREFIX)/lib/lua/$(LUA_VERSION)
PROVE ?= prove -I ../test-nginx/lib
INSTALL ?= install
.PHONY: all install test test_all start_redis_instances stop_redis_instances \
start_redis_instance stop_redis_instance cleanup_redis_instance flush_db \
check_ports test_ledge test_sentinel coverage delete_sentinel_config check
all: ;
install: all
$(INSTALL) -d $(DESTDIR)/$(LUA_LIB_DIR)/ledge
$(INSTALL) lib/ledge/*.lua $(DESTDIR)/$(LUA_LIB_DIR)/ledge
test: test_ledge
test_all: start_redis_instances test_ledge test_sentinel stop_redis_instances
###############################################################################
# Deprecated, use docker compose to run Redis instead
##############################################################################
start_redis_instances: check_ports
@$(foreach port,$(TEST_LEDGE_REDIS_PORTS), \
[[ "$(port)" != "$(REDIS_FIRST_PORT)" ]] && \
SLAVE="$(REDIS_SLAVE_ARG)" || \
SLAVE="" && \
$(MAKE) start_redis_instance args="$$SLAVE" port=$(port) \
prefix=$(REDIS_PREFIX)$(port) && \
) true
@$(foreach port,$(TEST_LEDGE_SENTINEL_PORTS), \
echo "port $(port)" > "$(SENTINEL_CONFIG_PREFIX)-$(port).conf"; \
echo "$$TEST_LEDGE_SENTINEL_CONFIG" >> "$(SENTINEL_CONFIG_PREFIX)-$(port).conf"; \
$(MAKE) start_redis_instance \
port=$(port) args="$(SENTINEL_CONFIG_PREFIX)-$(port).conf --sentinel" \
prefix=$(REDIS_PREFIX)$(port) && \
) true
stop_redis_instances: delete_sentinel_config
-@$(foreach port,$(TEST_LEDGE_REDIS_PORTS) $(TEST_LEDGE_SENTINEL_PORTS), \
$(MAKE) stop_redis_instance cleanup_redis_instance port=$(port) \
prefix=$(REDIS_PREFIX)$(port) && \
) true 2>&1 > /dev/null
start_redis_instance:
-@echo "Starting redis on port $(port) with args: \"$(args)\""
-@mkdir -p $(prefix)
@$(REDIS_CMD) $(args) \
--pidfile $(prefix)$(REDIS_PID) \
--bind 127.0.0.1 --port $(port) \
--unixsocket $(prefix)$(REDIS_SOCK) \
--unixsocketperm 777 \
--dir $(prefix) \
--logfile $(prefix)$(REDIS_LOG) \
--loglevel debug \
--daemonize yes
stop_redis_instance:
-@echo "Stopping redis on port $(port)"
-@[[ -f "$(prefix)$(REDIS_PID)" ]] && kill -QUIT \
`cat $(prefix)$(REDIS_PID)` 2>&1 > /dev/null || true
cleanup_redis_instance: stop_redis_instance
-@echo "Cleaning up redis files in $(prefix)"
-@rm -rf $(prefix)
delete_sentinel_config:
-@echo "Cleaning up sentinel config files"
-@rm -f $(SENTINEL_CONFIG_PREFIX)-*.conf
# Verify that none of the Redis / Sentinel ports are already in use before
# starting instances. (Previously iterated $(REDIS_PORTS), which is never
# defined, so the check silently did nothing.)
check_ports:
	-@echo "Checking ports $(TEST_LEDGE_REDIS_PORTS) $(TEST_LEDGE_SENTINEL_PORTS)"
	@$(foreach port,$(TEST_LEDGE_REDIS_PORTS) $(TEST_LEDGE_SENTINEL_PORTS),! lsof -i :$(port) &&) true 2>&1 > /dev/null
###############################################################################
releng:
@util/lua-releng -eL
flush_db:
@$(REDIS_CLI) flushall
test_ledge: releng flush_db
@$(TEST_LEDGE_REDIS_VARS) $(PROVE) $(TEST_FILE)
-@echo "Qless errors:"
@$(REDIS_CLI) -n $(TEST_LEDGE_REDIS_QLESS_DATABASE) llen ql:f:job-error
test_sentinel: releng flush_db
$(TEST_LEDGE_SENTINEL_VARS) $(PROVE) $(SENTINEL_TEST_FILE)/01-master_up.t
$(REDIS_CLI) shutdown
$(TEST_LEDGE_SENTINEL_VARS) $(PROVE) $(SENTINEL_TEST_FILE)/02-master_down.t
sleep $(TEST_LEDGE_SENTINEL_PROMOTION_TIME)
$(TEST_LEDGE_SENTINEL_VARS) $(PROVE) $(SENTINEL_TEST_FILE)/03-slave_promoted.t
test_leak: releng flush_db
$(TEST_LEDGE_REDIS_VARS) TEST_NGINX_CHECK_LEAK=1 $(PROVE) $(TEST_FILE)
coverage: releng flush_db
@rm -f luacov.stats.out
@$(TEST_LEDGE_REDIS_VARS) TEST_COVERAGE=1 $(PROVE) $(TEST_FILE)
@luacov
@tail -30 luacov.report.out
-@echo "Qless errors:"
@$(REDIS_CLI) -n $(TEST_LEDGE_REDIS_QLESS_DATABASE) llen ql:f:job-error
check:
luacheck lib
================================================
FILE: README.md
================================================
# Ledge
[![Build Status](https://travis-ci.org/ledgetech/ledge.svg?branch=master)](https://travis-ci.org/ledgetech/ledge)
An RFC compliant and [ESI](https://www.w3.org/TR/esi-lang) capable HTTP cache for [Nginx](http://nginx.org) / [OpenResty](https://openresty.org), backed by [Redis](http://redis.io).
Ledge can be utilised as a fast, robust and scalable alternative to Squid / Varnish etc, either installed standalone or integrated into an existing Nginx server or load balancer.
Moreover, it is particularly suited to applications where the origin is expensive or distant, making it desirable to serve from cache as optimistically as possible.
## Table of Contents
* [Installation](#installation)
* [Philosophy and Nomenclature](#philosophy-and-nomenclature)
* [Cache keys](#cache-keys)
* [Streaming design](#streaming-design)
* [Collapsed forwarding](#collapsed-forwarding)
* [Advanced cache patterns](#advanced-cache-patterns)
* [Minimal configuration](#minimal-configuration)
* [Config systems](#config-systems)
* [Events system](#events-system)
* [Caching basics](#caching-basics)
* [Purging](#purging)
* [Serving stale](#serving-stale)
* [Edge Side Includes](#edge-side-includes)
* [API](#api)
* [ledge.configure](#ledgeconfigure)
* [ledge.set_handler_defaults](#ledgeset_handler_defaults)
* [ledge.create\_handler](#ledgecreate_handler)
* [ledge.create\_worker](#ledgecreate_worker)
* [ledge.bind](#ledgebind)
* [handler.bind](#handlerbind)
* [handler.run](#handlerrun)
* [worker.run](#workerrun)
* [Handler configuration options](#handler-configuration-options)
* [Events](#events)
* [Administration](#administration)
* [Managing Qless](#managing-qless)
* [Licence](#licence)
## Installation
[OpenResty](http://openresty.org/) is a superset of [Nginx](http://nginx.org), bundling [LuaJIT](http://luajit.org/) and the [lua-nginx-module](https://github.com/openresty/lua-nginx-module) as well as many other things. Whilst it is possible to build all of these things into Nginx yourself, we recommend using the latest OpenResty.
### 1. Download and install:
* [OpenResty](http://openresty.org/) >= 1.11.x
* [Redis](http://redis.io/download) >= 2.8.x
* [LuaRocks](https://luarocks.org/)
### 2. Install Ledge using LuaRocks:
```
luarocks install ledge
```
This will install the latest stable release, and all other Lua module dependencies, which if installing manually without LuaRocks are:
* [lua-resty-http](https://github.com/pintsized/lua-resty-http)
* [lua-resty-redis-connector](https://github.com/pintsized/lua-resty-redis-connector)
* [lua-resty-qless](https://github.com/pintsized/lua-resty-qless)
* [lua-resty-cookie](https://github.com/cloudflare/lua-resty-cookie)
* [lua-ffi-zlib](https://github.com/hamishforbes/lua-ffi-zlib)
* [lua-resty-upstream](https://github.com/hamishforbes/lua-resty-upstream) *(optional, for load balancing / healthchecking upstreams)*
### 3. Review OpenResty documentation
If you are new to OpenResty, it's quite important to review the [lua-nginx-module](https://github.com/openresty/lua-nginx-module) documentation on how to run Lua code in Nginx, as the environment is unusual. Specifically, it's useful to understand the meaning of the different Nginx phase hooks such as `init_by_lua` and `content_by_lua`, as well as how the `lua-nginx-module` locates Lua modules with the [lua_package_path](https://github.com/openresty/lua-nginx-module#lua_package_path) directive.
[Back to TOC](#table-of-contents)
## Philosophy and Nomenclature
The central module is called `ledge`, and provides factory methods for creating `handler` instances (for handling a request) and `worker` instances (for running background tasks). The `ledge` module is also where global configuration is managed.
A `handler` is short lived. It is typically created at the beginning of the Nginx `content` phase for a request, and when its [run()](#handlerrun) method is called, takes responsibility for processing the current request and delivering a response. When [run()](#handlerrun) has completed, HTTP status, headers and body will have been delivered to the client.
A `worker` is long lived, and there is one per Nginx worker process. It is created when Nginx starts a worker process, and dies when the Nginx worker dies. The `worker` pops queued background jobs and processes them.
An `upstream` is the only thing which must be manually configured, and points to another HTTP host where actual content lives. Typically one would use DNS to resolve client connections to the Nginx server running Ledge, and tell Ledge where to fetch from with the `upstream` configuration. As such, Ledge isn't designed to work as a forwarding proxy.
[Redis](http://redis.io) is used for much more than cache storage. We rely heavily on its data structures to maintain cache `metadata`, as well as embedded Lua scripts for atomic task management and so on. By default, all cache body data and `metadata` will be stored in the same Redis instance. The location of cache `metadata` is global, set when Nginx starts up.
Cache body data is handled by the `storage` system, and as mentioned, by default shares the same Redis instance as the `metadata`. However, `storage` is abstracted via a [driver system](#storage_driver) making it possible to store cache body data in a separate Redis instance, or a group of horizontally scalable Redis instances via a [proxy](https://github.com/twitter/twemproxy), or to roll your own `storage` driver, for example targeting PostgreSQL or even simply a filesystem. It's perhaps important to consider that by default all cache storage uses Redis, and as such is bound by system memory.
[Back to TOC](#table-of-contents)
### Cache keys
A goal of any caching system is to safely maximise the HIT potential. That is, normalise factors which would split the cache wherever possible, in order to share as much cache as possible.
This is tricky to generalise, and so by default Ledge puts sane defaults from the request URI into the cache key, and provides a means for this to be customised by altering the [cache\_key\_spec](#cache_key_spec).
URI arguments are sorted alphabetically by default, so `http://example.com?a=1&b=2` would hit the same cache entry as `http://example.com?b=2&a=1`.
[Back to TOC](#table-of-contents)
### Streaming design
HTTP response sizes can be wildly different, sometimes tiny and sometimes huge, and it's not always possible to know the total size up front.
To guarantee predictable memory usage regardless of response sizes Ledge operates a streaming design, meaning it only ever operates on a single `buffer` per request at a time. This is equally true when fetching upstream to when reading from cache or serving to the client request.
It's also true (mostly) when processing [ESI](#edge-side-includes) instructions, except for in the case where an instruction is found to span multiple buffers. In this case, we continue buffering until a complete instruction can be understood, up to a [configurable limit](#esi_max_size).
This streaming design also improves latency, since we start serving the first `buffer` to the client request as soon as we're done with it, rather than fetching and saving an entire resource prior to serving. The `buffer` size can be [tuned](#buffer_size) even on a per `location` basis.
[Back to TOC](#table-of-contents)
### Collapsed forwarding
Ledge can attempt to collapse concurrent origin requests for known (previously) cacheable resources into a single upstream request. That is, if an upstream request for a resource is in progress, subsequent concurrent requests for the same resource will not bother the upstream, and instead wait for the first request to finish.
This is particularly useful to reduce upstream load if a spike of traffic occurs for expired and expensive content (since the chances of concurrent requests is higher on slower content).
[Back to TOC](#table-of-contents)
### Advanced cache patterns
Beyond standard RFC compliant cache behaviours, Ledge has many features designed to maximise cache HIT rates and to reduce latency for requests. See the sections on [Edge Side Includes](#edge-side-includes), [serving stale](#serving-stale) and [revalidating on purge](#purging) for more information.
[Back to TOC](#table-of-contents)
## Minimal configuration
Assuming you have Redis running on `localhost:6379`, and your upstream is at `localhost:8080`, add the following to the `nginx.conf` file in your OpenResty installation.
```lua
http {
if_modified_since Off;
lua_check_client_abort On;
init_by_lua_block {
require("ledge").configure({
redis_connector_params = {
url = "redis://127.0.0.1:6379/0",
},
})
require("ledge").set_handler_defaults({
upstream_host = "127.0.0.1",
upstream_port = 8080,
})
}
init_worker_by_lua_block {
require("ledge").create_worker():run()
}
server {
server_name example.com;
listen 80;
location / {
content_by_lua_block {
require("ledge").create_handler():run()
}
}
}
}
```
[Back to TOC](#table-of-contents)
## Config systems
There are four different layers to the configuration system. Firstly there is the main [Redis config](#ledgeconfigure) and [handler defaults](#ledgeset_handler_defaults) config, which are global and must be set during the Nginx `init` phase.
Beyond this, you can specify [handler instance config](#ledgecreate_handler) on an Nginx `location` block basis, and finally there are some performance tuning config options for the [worker](#ledgecreate_worker) instances.
In addition, there is an [events system](#events-system) for binding Lua functions to mid-request events, providing opportunities to dynamically alter configuration.
[Back to TOC](#table-of-contents)
## Events system
Ledge makes most of its decisions based on the content it is working with. HTTP request and response headers drive the semantics for content delivery, and so rather than having countless configuration options to change this, we instead provide opportunities to alter the given semantics when necessary.
For example, if an `upstream` fails to set a long enough cache expiry, rather than inventing an option such as "extend\_ttl", we instead would `bind` to the `after_upstream_request` event, and adjust the response headers to include the ttl we're hoping for.
```lua
handler:bind("after_upstream_request", function(res)
res.header["Cache-Control"] = "max-age=86400"
end)
```
This particular event fires after we've fetched upstream, but before Ledge makes any decisions about whether the content can be cached or not. Once we've adjusted our headers, Ledge will read them as if they came from the upstream itself.
Note that multiple functions can be bound to a single event, either globally or per handler, and they will be called in the order they were bound. There is also currently no means to inspect which functions have been bound, or to unbind them.
See the [events](#events) section for a complete list of events and their definitions.
[Back to TOC](#table-of-contents)
### Binding globally
Binding a function globally means it will fire for the given event, on all requests. This is perhaps useful if you have many different `location` blocks, but need to always perform the same logic.
```lua
init_by_lua_block {
require("ledge").bind("before_serve", function(res)
res.header["X-Foo"] = "bar" -- always set X-Foo to bar
end)
}
```
[Back to TOC](#table-of-contents)
### Binding to handlers
More commonly, we just want to alter behaviour for a given Nginx `location`.
```lua
location /foo_location {
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_serve", function(res)
res.header["X-Foo"] = "bar" -- only set X-Foo for this location
end)
handler:run()
}
}
```
[Back to TOC](#table-of-contents)
### Performance implications
Writing simple logic for events is not expensive at all (and in many cases will be JIT compiled). If you need to consult service endpoints during an event then obviously consider that this will affect your overall latency, and make sure you do everything in a **non-blocking** way, e.g. using [cosockets](https://github.com/openresty/lua-nginx-module#ngxsockettcp) provided by OpenResty, or a driver based upon this.
If you have lots of event handlers, consider that creating closures in Lua is relatively expensive. A good solution would be to make your own module, and pass the defined functions in.
```lua
location /foo_location {
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_serve", require("my.handler.hooks").add_foo_header)
handler:run()
}
}
```
[Back to TOC](#table-of-contents)
## Caching basics
For normal HTTP caching operation, no additional configuration is required. If the HTTP response indicates the resource can be cached, then it will cache it. If the HTTP request indicates it accepts cache, it will be served cache. Note that these two conditions aren't mutually exclusive - a request could specify `no-cache`, and this will indeed trigger a fetch upstream, but if the response is cacheable then it will be saved and served to subsequent cache-accepting requests.
For more information on the myriad factors affecting this, including end-to-end revalidation and so on, please refer to [RFC 7234](https://tools.ietf.org/html/rfc7234).
The goal is to be 100% RFC compliant, but with some extensions to allow more aggressive caching in certain cases. If something doesn't work as you expect, please do feel free to [raise an issue](https://github.com/pintsized/ledge).
[Back to TOC](#table-of-contents)
## Purging
To manually invalidate a cache item (or purge), we support the non-standard `PURGE` method familiar to users of Squid. Send a HTTP request to the URI with the method set, and Ledge will attempt to invalidate the item, returning status `200` on success and `404` if the URI was not found in cache, along with a JSON body for more details.
A purge request will affect all representations associated with the cache key, for example compressed and uncompressed responses separated by the `Vary: Accept-Encoding` response header will all be purged.
`$> curl -X PURGE -H "Host: example.com" http://cache.example.com/page1 | jq .`
```json
{
"purge_mode": "invalidate",
"result": "nothing to purge"
}
```
There are three purge modes, selectable by setting the `X-Purge` request header with one or more of the following values:
* `invalidate`: (default) marks the item as expired, but doesn't delete anything.
* `delete`: hard removes the item from cache
* `revalidate`: invalidates but then schedules a background revalidation to re-prime the cache.
`$> curl -X PURGE -H "X-Purge: revalidate" -H "Host: example.com" http://cache.example.com/page1 | jq .`
```json
{
"purge_mode": "revalidate",
"qless_job": {
"options": {
"priority": 4,
"jid": "5eeabecdc75571d1b93e9c942dfcebcb",
"tags": [
"revalidate"
]
},
"jid": "5eeabecdc75571d1b93e9c942dfcebcb",
"klass": "ledge.jobs.revalidate"
},
"result": "already expired"
}
```
Background revalidation jobs can be tracked in the qless metadata. See [managing qless](#managing-qless) for more information.
In general, `PURGE` is considered an administration task and probably shouldn't be allowed from the internet. Consider limiting it by IP address for example:
```nginx
limit_except GET POST PUT DELETE {
allow 127.0.0.1;
deny all;
}
```
[Back to TOC](#table-of-contents)
### JSON API
A JSON based API is also available for purging multiple cache items at once.
This requires a `PURGE` request with a `Content-Type` header set to `application/json` and a valid JSON request body.
Valid parameters
* `uris` - Array of URIs to purge, can contain wildcard URIs
* `purge_mode` - As the `X-Purge` header in a normal purge request
* `headers` - Hash of additional headers to include in the purge request
Returns a results hash keyed by URI or a JSON error response
`$> curl -X PURGE -H "Content-Type: Application/JSON" http://cache.example.com/ -d '{"uris": ["http://www.example.com/1", "http://www.example.com/2"]}' | jq .`
```json
{
"purge_mode": "invalidate",
"result": {
"http://www.example.com/1": {
"result": "purged"
},
"http://www.example.com/2":{
"result": "nothing to purge"
}
}
}
```
[Back to TOC](#table-of-contents)
### Wildcard purging
Wildcard (\*) patterns are also supported in `PURGE` URIs, which will always return a status of `200` and a JSON body detailing a background job. Wildcard purges involve scanning the entire keyspace, and so can take a little while. See [keyspace\_scan\_count](#keyspace_scan_count) for tuning help.
In addition, the `X-Purge` mode will propagate to all URIs purged as a result of the wildcard, making it possible to trigger site / section wide revalidation for example. Be careful what you wish for.
`$> curl -v -X PURGE -H "X-Purge: revalidate" -H "Host: example.com" http://cache.example.com/* | jq .`
```json
{
"purge_mode": "revalidate",
"qless_job": {
"options": {
"priority": 5,
"jid": "b2697f7cb2e856cbcad1f16682ee20b0",
"tags": [
"purge"
]
},
"jid": "b2697f7cb2e856cbcad1f16682ee20b0",
"klass": "ledge.jobs.purge"
},
"result": "scheduled"
}
```
[Back to TOC](#table-of-contents)
## Serving stale
Content is considered "stale" when its age is beyond its TTL. However, depending on the value of [keep_cache_for](#keep_cache_for) (which defaults to 1 month), we don't actually expire content in Redis straight away.
This allows us to implement the stale cache control extensions described in [RFC5861](https://tools.ietf.org/html/rfc5861), which provides request and response header semantics for describing how stale something can be served, when it should be revalidated in the background, and how long we can serve stale content in the event of upstream errors.
This can be very effective in ensuring a fast user experience. For example, if your content has a genuine `max-age` of 24 hours, consider changing this to 1 hour, and adding `stale-while-revalidate` for 23 hours. The total TTL is therefore the same, but the first request after the first hour will trigger backgrounded revalidation, extending the TTL for a further 1 hour + 23 hours.
If your origin server cannot be configured in this way, you can always override by [binding](#events) to the [before_save](#before_save) event.
```lua
handler:bind("before_save", function(res)
-- Valid for 1 hour, stale-while-revalidate for 23 hours, stale-if-error for three days
res.header["Cache-Control"] = "max-age=3600, stale-while-revalidate=82800, stale-if-error=259200"
end)
```
In other words, set the TTL to the highest comfortable frequency of requests at the origin, and `stale-while-revalidate` to the longest comfortable TTL, to increase the chances of background revalidation occurring. Note that the first stale request will obviously get stale content, and so very long values can result in very out of date content for one request.
All stale behaviours are constrained by normal cache control semantics. For example, if the origin is down, and the response could be served stale due to the upstream error, but the request contains `Cache-Control: no-cache` or even `Cache-Control: max-age=60` where the content is older than 60 seconds, they will be served the error, rather than the stale content.
[Back to TOC](#table-of-contents)
## Edge Side Includes
Almost complete support for the [ESI 1.0 Language Specification](https://www.w3.org/TR/esi-lang) is included, with a few exceptions, and a few enhancements.
```html
Hi
```
[Back to TOC](#table-of-contents)
### Enabling ESI
Note that simply [enabling](#esi_enabled) ESI might not be enough. We also check the [content type](#esi_content_types) against the allowed types specified, but more importantly ESI processing is contingent upon the [Edge Architecture Specification](https://www.w3.org/TR/edge-arch/). When enabled, Ledge will advertise capabilities upstream with the `Surrogate-Capability` request header, and expect the upstream response to include a `Surrogate-Control` header delegating ESI processing to Ledge.
If your upstream is not ESI aware, a common approach is to bind to the [after\_upstream\_request](#after_upstream_request) event in order to add the `Surrogate-Control` header manually. E.g.
```lua
handler:bind("after_upstream_request", function(res)
-- Don't enable ESI on redirect responses
-- Don't override Surrogate Control if it already exists
local status = res.status
if not res.header["Surrogate-Control"] and not (status > 300 and status < 303) then
res.header["Surrogate-Control"] = 'content="ESI/1.0"'
end
end)
```
Note that if ESI is processed, downstream cache-ability is automatically dropped since you don't want other intermediaries or browsers caching the result.
It's therefore best to only set `Surrogate-Control` for content which you know has ESI instructions. Whilst Ledge will detect the presence of ESI instructions when saving (and do nothing on cache HITs if no instructions are present), on a cache MISS it will have already dropped downstream cache headers before reading / saving the body. This is a side-effect of the [streaming design](#streaming-design).
[Back to TOC](#table-of-contents)
### Regular expressions in conditions
In addition to the operators defined in the
[ESI specification](https://www.w3.org/TR/esi-lang), we also support regular
expressions in conditions (as string literals), using the `=~` operator.
```html
Hi James or John
```
Supported modifiers are as per the [ngx.re.\*](https://github.com/openresty/lua-nginx-module#ngxrematch) documentation.
[Back to TOC](#table-of-contents)
### Custom ESI variables
In addition to the variables defined in the [ESI specification](https://www.w3.org/TR/esi-lang), it is possible to provide run time custom variables using the [esi_custom_variables](#esi_custom_variables) handler config option.
```lua
content_by_lua_block {
require("ledge").create_handler({
esi_custom_variables = {
messages = {
foo = "bar",
},
},
}):run()
}
```
```html
$(MESSAGES{foo})
```
[Back to TOC](#table-of-contents)
### ESI Args
It can be tempting to use URI arguments to pages using ESI in order to change layout dynamically, but this comes at the cost of generating multiple cache items - one for each permutation of URI arguments.
ESI args is a neat feature to get around this, by using a configurable [prefix](#esi_args_prefix), which defaults to `esi_`. URI arguments with this prefix are removed from the cache key and also from upstream requests, and instead stuffed into the `$(ESI_ARGS{foo})` variable for use in ESI, typically in conditions. That is, think of them as magic URI arguments which have meaning for the ESI processor only, and should never affect cacheability or upstream content generation.
`$> curl -H "Host: example.com" http://cache.example.com/page1?esi_display_mode=summary`
```html
```
In this example, the `esi_display_mode` values of `summary` or `details` will return the same cache HIT, but display different content.
If `$(ESI_ARGS)` is used without a field key, it renders the original query string arguments, e.g. `esi_foo=bar&esi_display_mode=summary`, URL encoded.
[Back to TOC](#table-of-contents)
### Variable Escaping
ESI variables are minimally escaped by default in order to prevent users from injecting additional ESI tags or XSS exploits.
Unescaped variables are available by prefixing the variable name with `RAW_`. This should be used with care.
```html
# /esi/test.html?a=
$(QUERY_STRING{a})
$(RAW_QUERY_STRING{a})
```
[Back to TOC](#table-of-contents)
### Missing ESI features
The following parts of the [ESI specification](https://www.w3.org/TR/esi-lang) are not supported, but could be in due course if a need is identified.
* `` not implemented (or advertised as a capability).
* No support for the `onerror` or `alt` attributes for ``. Instead, we "continue" on error by default.
* `` not implemented.
* The "dictionary (special)" substructure variable type for `HTTP_USER_AGENT` is not implemented.
[Back to TOC](#table-of-contents)
## API
### ledge.configure
syntax: `ledge.configure(config)`
This function provides Ledge with Redis connection details for all cache `metadata` and background jobs. This is global and cannot be specified or adjusted outside the Nginx `init` phase.
```lua
init_by_lua_block {
require("ledge").configure({
redis_connector_params = {
url = "redis://mypassword@127.0.0.1:6380/3",
},
qless_db = 4,
})
}
```
`config` is a table with the following options (unrecognised config will error hard on start up).
[Back to TOC](#table-of-contents)
#### redis_connector_params
`default: {}`
Ledge uses [lua-resty-redis-connector](https://github.com/pintsized/lua-resty-redis-connector) to handle all Redis connections. It simply passes anything given in `redis_connector_params` straight to [lua-resty-redis-connector](https://github.com/pintsized/lua-resty-redis-connector), so review the documentation there for options, including how to use [Redis Sentinel](https://redis.io/topics/sentinel).
#### qless_db
`default: 1`
Specifies the Redis DB number to store [qless](https://github.com/pintsized/lua-resty-qless) background job data.
[Back to TOC](#table-of-contents)
### ledge.set\_handler\_defaults
syntax: `ledge.set_handler_defaults(config)`
This method overrides the default configuration used for all spawned request `handler` instances. This is global and cannot be specified or adjusted outside the Nginx `init` phase, but these defaults can be overridden on a per `handler` basis. See [below](#handler-configuration-options) for a complete list of configuration options.
```lua
init_by_lua_block {
require("ledge").set_handler_defaults({
upstream_host = "127.0.0.1",
upstream_port = 8080,
})
}
```
[Back to TOC](#table-of-contents)
### ledge.create\_handler
syntax: `local handler = ledge.create_handler(config)`
Creates a `handler` instance for the current request. Config given here will be merged with the defaults, allowing certain options to be adjusted on a per Nginx `location` basis.
```lua
server {
server_name example.com;
listen 80;
location / {
content_by_lua_block {
require("ledge").create_handler({
upstream_port = 8081,
}):run()
}
}
}
```
[Back to TOC](#table-of-contents)
### ledge.create\_worker
syntax: `local worker = ledge.create_worker(config)`
Creates a `worker` instance inside the current Nginx worker process, for processing background jobs. You only need to call this once inside a single `init_worker` block, and it will be called for each Nginx worker that is configured.
Job queues can be run at varying amounts of concurrency per worker, which can be set by providing `config` here. See [managing qless](#managing-qless) for more details.
```lua
init_worker_by_lua_block {
require("ledge").create_worker({
interval = 1,
gc_queue_concurrency = 1,
purge_queue_concurrency = 2,
revalidate_queue_concurrency = 5,
}):run()
}
```
[Back to TOC](#table-of-contents)
### ledge.bind
syntax: `ledge.bind(event_name, callback)`
Binds the `callback` function to the event given in `event_name`, globally for all requests on this system. Arguments to `callback` vary based on the event. See [below](#events) for event definitions.
[Back to TOC](#table-of-contents)
### handler.bind
syntax: `handler:bind(event_name, callback)`
Binds the `callback` function to the event given in `event_name` for this handler only. Note the `:` in `handler:bind()` which differs to the global `ledge.bind()`.
Arguments to `callback` vary based on the event. See [below](#events) for event definitions.
[Back to TOC](#table-of-contents)
### handler.run
syntax: `handler:run()`
Must be called during the `content_by_lua` phase. It processes the current request and serves a response. If you fail to call this method in your `location` block, nothing will happen.
[Back to TOC](#table-of-contents)
### worker.run
syntax: `worker:run()`
Must be called during the `init_worker` phase, otherwise background tasks will not be run, including garbage collection which is very important.
[Back to TOC](#table-of-contents)
### Handler configuration options
* [storage_driver](#storage_driver)
* [storage_driver_config](#storage_driver_config)
* [origin_mode](#origin_mode)
* [upstream_connect_timeout](#upstream_connect_timeout)
* [upstream_send_timeout](#upstream_send_timeout)
* [upstream_read_timeout](#upstream_read_timeout)
* [upstream_keepalive_timeout](#upstream_keepalive_timeout)
* [upstream_keepalive_poolsize](#upstream_keepalive_poolsize)
* [upstream_host](#upstream_host)
* [upstream_port](#upstream_port)
* [upstream_use_ssl](#upstream_use_ssl)
* [upstream_ssl_server_name](#upstream_ssl_server_name)
* [upstream_ssl_verify](#upstream_ssl_verify)
* [buffer_size](#buffer_size)
* [advertise_ledge](#advertise_ledge)
* [keep_cache_for](#keep_cache_for)
* [minimum_old_entity_download_rate](#minimum_old_entity_download_rate)
* [esi_enabled](#esi_enabled)
* [esi_content_types](#esi_content_types)
* [esi_allow_surrogate_delegation](#esi_allow_surrogate_delegation)
* [esi_recursion_limit](#esi_recursion_limit)
* [esi_args_prefix](#esi_args_prefix)
* [esi_custom_variables](#esi_custom_variables)
* [esi_max_size](#esi_max_size)
* [esi_attempt_loopback](#esi_attempt_loopback)
* [esi_vars_cookie_blacklist](#esi_vars_cookie_blacklist)
* [esi_disable_third_party_includes](#esi_disable_third_party_includes)
* [esi_third_party_includes_domain_whitelist](#esi_third_party_includes_domain_whitelist)
* [enable_collapsed_forwarding](#enable_collapsed_forwarding)
* [collapsed_forwarding_window](#collapsed_forwarding_window)
* [gunzip_enabled](#gunzip_enabled)
* [keyspace_scan_count](#keyspace_scan_count)
* [cache_key_spec](#cache_key_spec)
* [max_uri_args](#max_uri_args)
#### storage_driver
default: `ledge.storage.redis`
This is a `string` value, which will be used to attempt to load a storage driver. Any third party driver here can accept its own config options (see below), but must provide the following interface:
* `bool new()`
* `bool connect()`
* `bool close()`
* `number get_max_size()` *(return nil for no max)*
* `bool exists(string entity_id)`
* `bool delete(string entity_id)`
* `bool set_ttl(string entity_id, number ttl)`
* `number get_ttl(string entity_id)`
* `function get_reader(object response)`
* `function get_writer(object response, number ttl, function onsuccess, function onfailure)`
*Note, whilst it is possible to configure storage drivers on a per `location` basis, it is **strongly** recommended that you never do this, and consider storage drivers to be system wide, much like the main Redis config. If you really need different storage driver configurations for different locations, then it will work, but features such as purging using wildcards will silently not work. YMMV.*
[Back to TOC](#handler-configuration-options)
#### storage_driver_config
`default: {}`
Storage configuration can vary based on the driver. Currently we only have a Redis driver.
[Back to TOC](#handler-configuration-options)
##### Redis storage driver config
* `redis_connector_params` Redis params table, as per [lua-resty-redis-connector](https://github.com/pintsized/lua-resty-redis-connector)
* `max_size` (bytes), defaults to `1MB`
* `supports_transactions` defaults to `true`, set to false if using a Redis proxy.
If `supports_transactions` is set to `false`, cache bodies are not written atomically. However, if there is an error writing, the main Redis system will be notified and the overall transaction will be aborted. The result being potentially orphaned body entities in the storage system, which will hopefully eventually expire. The only reason to turn this off is if you are using a Redis proxy, as any transaction related commands will break the connection.
[Back to TOC](#handler-configuration-options)
#### upstream_connect_timeout
default: `1000 (ms)`
Maximum time to wait for an upstream connection (in milliseconds). If it is exceeded, we send a `503` status code, unless [stale_if_error](#stale_if_error) is configured.
[Back to TOC](#handler-configuration-options)
#### upstream_send_timeout
default: `2000 (ms)`
Maximum time to wait sending data on a connected upstream socket (in milliseconds). If it is exceeded, we send a `503` status code, unless [stale_if_error](#stale_if_error) is configured.
[Back to TOC](#handler-configuration-options)
#### upstream_read_timeout
default: `10000 (ms)`
Maximum time to wait on a connected upstream socket (in milliseconds). If it is exceeded, we send a `503` status code, unless [stale_if_error](#stale_if_error) is configured.
[Back to TOC](#handler-configuration-options)
#### upstream_keepalive_timeout
default: `75000`
[Back to TOC](#handler-configuration-options)
#### upstream_keepalive_poolsize
default: `64`
[Back to TOC](#handler-configuration-options)
#### upstream_host
default: `""`
Specifies the hostname or IP address of the upstream host. If a hostname is specified, you must configure the Nginx [resolver](http://nginx.org/en/docs/http/ngx_http_core_module.html#resolver) somewhere, for example:
```nginx
resolver 8.8.8.8;
```
[Back to TOC](#handler-configuration-options)
#### upstream_port
default: `80`
Specifies the port of the upstream host.
[Back to TOC](#handler-configuration-options)
#### upstream_use_ssl
default: `false`
Toggles the use of SSL on the upstream connection. Other `upstream_ssl_*` options will be ignored if this is not set to `true`.
[Back to TOC](#handler-configuration-options)
#### upstream_ssl_server_name
default: `""`
Specifies the SSL server name used for Server Name Indication (SNI). See [sslhandshake](https://github.com/openresty/lua-nginx-module#tcpsocksslhandshake) for more information.
[Back to TOC](#handler-configuration-options)
#### upstream_ssl_verify
default: `true`
Toggles SSL verification. See [sslhandshake](https://github.com/openresty/lua-nginx-module#tcpsocksslhandshake) for more information.
[Back to TOC](#handler-configuration-options)
#### cache_key_spec
`default: cache_key_spec = { "scheme", "host", "uri", "args" },`
Specifies the format for creating cache keys. The default spec above will create keys in Redis similar to:
```
ledge:cache:http:example.com:/about::
ledge:cache:http:example.com:/about:p=2&q=foo:
```
The list of available string identifiers in the spec is:
* `scheme` either http or https
* `host` the hostname of the current request
* `port` the public port of the current request
* `uri` the URI (without args)
* `args` the URI args, sorted alphabetically
In addition to these string identifiers, dynamic parameters can be added to the cache key by providing functions. Any functions given must expect no arguments and return a string value.
```lua
local function get_device_type()
-- dynamically work out device type
return "tablet"
end
require("ledge").create_handler({
cache_key_spec = {
get_device_type,
"scheme",
"host",
"uri",
"args",
}
}):run()
```
Consider leveraging vary, via the [before_vary_selection](#before_vary_selection) event, for separating cache entries rather than modifying the main `cache_key_spec` directly.
[Back to TOC](#handler-configuration-options)
#### origin_mode
default: `ledge.ORIGIN_MODE_NORMAL`
Determines the overall behaviour for connecting to the origin. `ORIGIN_MODE_NORMAL` will assume the origin is up, and connect as necessary.
`ORIGIN_MODE_AVOID` is similar to Squid's `offline_mode`, where any retained cache (expired or not) will be served rather than trying the origin, regardless of cache-control headers, but the origin will be tried if there is no cache to serve.
`ORIGIN_MODE_BYPASS` is the same as `AVOID`, except if there is no cache to serve we send a `503 Service Unavailable` status code to the client and never attempt an upstream connection.
[Back to TOC](#handler-configuration-options)
#### keep_cache_for
default: `86400 * 30 (1 month in seconds)`
Specifies how long to retain cache data past its expiry date. This allows us to serve stale cache in the event of upstream failure with [stale_if_error](#stale_if_error) or [origin_mode](#origin_mode) settings.
Items will be evicted when under memory pressure provided you are using one of the Redis [volatile eviction policies](http://redis.io/topics/lru-cache), so there should generally be no real need to lower this for space reasons.
Items at the extreme end of this (i.e. nearly a month old) are clearly very rarely requested, or more likely, have been removed at the origin.
[Back to TOC](#handler-configuration-options)
#### minimum_old_entity_download_rate
default: `56 (kbps)`
Clients reading slower than this who are also unfortunate enough to have started reading from an entity which has been replaced (due to another client causing a revalidation for example), may have their entity garbage collected before they finish, resulting in an incomplete resource being delivered.
Lowering this is fairer on slow clients, but widens the potential window for multiple old entities to stack up, which in turn could threaten Redis storage space and force evictions.
[Back to TOC](#handler-configuration-options)
#### enable_collapsed_forwarding
default: `false`
[Back to TOC](#handler-configuration-options)
#### collapsed_forwarding_window
When collapsed forwarding is enabled, if a fatal error occurs during the origin request, the collapsed requests may never receive the response they are waiting for. This setting puts a limit on how long they will wait, and how long before new requests will decide to try the origin for themselves.
If this is set shorter than your origin takes to respond, then you may get more upstream requests than desired. Fatal errors (server reboot etc) may result in hanging connections for up to the maximum time set. Normal errors (such as upstream timeouts) work independently of this setting.
[Back to TOC](#handler-configuration-options)
#### gunzip_enabled
default: `true`
With this enabled, gzipped responses will be uncompressed on the fly for clients that do not set `Accept-Encoding: gzip`. Note that if we receive a gzipped response for a resource containing ESI instructions, we gunzip whilst saving and store uncompressed, since we need to read the ESI instructions.
Also note that `Range` requests for gzipped content must be ignored - the full response will be returned.
[Back to TOC](#handler-configuration-options)
#### buffer_size
default: `2^16 (64KB in bytes)`
Specifies the internal buffer size (in bytes) used for data to be read/written/served. Upstream responses are read in chunks of this maximum size, preventing allocation of large amounts of memory in the event of receiving large files. Data is also stored internally as a list of chunks, and delivered to the Nginx output chain buffers in the same fashion.
The only exception is if ESI is configured, and Ledge has determined there are ESI instructions to process, and any of these instructions span a given chunk. In this case, buffers are concatenated until a complete instruction is found, and then ESI operates on this new buffer, up to a maximum of [esi_max_size](#esi_max_size).
[Back to TOC](#handler-configuration-options)
#### keyspace_scan_count
default: `1000`
Tunes the behaviour of keyspace scans, which occur when sending a PURGE request with wildcard syntax. A higher number may be better if latency to Redis is high and the keyspace is large.
[Back to TOC](#handler-configuration-options)
#### max_uri_args
default: `100`
Limits the number of URI arguments returned in calls to [ngx.req.get_uri_args()](https://github.com/openresty/lua-nginx-module#ngxreqget_uri_args), to protect against DOS attacks.
[Back to TOC](#handler-configuration-options)
#### esi_enabled
default: `false`
Toggles [ESI](http://www.w3.org/TR/esi-lang) scanning and processing, though behaviour is also contingent upon [esi_content_types](#esi_content_types) and [esi_allow_surrogate_delegation](#esi_allow_surrogate_delegation) settings, as well as `Surrogate-Control` / `Surrogate-Capability` headers.
ESI instructions are detected on the slow path (i.e. when fetching from the origin), so only instructions which are known to be present are processed on cache HITs.
[Back to TOC](#handler-configuration-options)
#### esi_content_types
default: `{ text/html }`
Specifies content types to perform ESI processing on. All other content types will not be considered for processing.
[Back to TOC](#handler-configuration-options)
#### esi_allow_surrogate_delegation
default: false
[ESI Surrogate Delegation](http://www.w3.org/TR/edge-arch) allows for downstream intermediaries to advertise a capability to process ESI instructions nearer to the client. By setting this to `true` any downstream offering this will disable ESI processing in Ledge, delegating it downstream.
When set to a Lua table of IP address strings, delegation will only be allowed to these specific hosts. This may be important if ESI instructions contain sensitive data which must be removed.
[Back to TOC](#handler-configuration-options)
#### esi_recursion_limit
default: 10
Limits fragment inclusion nesting, to avoid accidental infinite recursion.
[Back to TOC](#handler-configuration-options)
#### esi_args_prefix
default: "esi\_"
URI args prefix for parameters to be ignored from the cache key (and not proxied upstream), for use exclusively with ESI rendering logic. Set to nil to disable the feature.
[Back to TOC](#handler-configuration-options)
#### esi_custom_variables
default: `{}`
Any variables supplied here will be available anywhere ESI vars can be evaluated. See [Custom ESI variables](#custom-esi-variables).
[Back to TOC](#handler-configuration-options)
#### esi_max_size
default: `1024 * 1024 (bytes)`
[Back to TOC](#handler-configuration-options)
#### esi_attempt_loopback
default: `true`
If an ESI subrequest has the same `scheme` and `host` as the parent request, we loopback the connection to the current
`server_addr` and `server_port` in order to avoid going over network.
[Back to TOC](#handler-configuration-options)
#### esi_vars_cookie_blacklist
default: `{}`
Cookie names given here will not be expandable as ESI variables: e.g. `$(HTTP_COOKIE)` or `$(HTTP_COOKIE{foo})`. However they
are not removed from the request data, and will still be propagated to `` subrequests.
This is useful if your client is sending a sensitive cookie that you don't ever want to accidentally evaluate in server output.
```lua
require("ledge").create_handler({
esi_vars_cookie_blacklist = {
secret = true,
["my-secret-cookie"] = true,
}
}):run()
```
Cookie names are given as the table key with a truthy value, for O(1) runtime lookup.
[Back to TOC](#handler-configuration-options)
#### esi_disable_third_party_includes
default: `false`
`` tags can make requests to any arbitrary URI. Turn this on to ensure the URI domain must match the URI of the current request.
[Back to TOC](#handler-configuration-options)
#### esi_third_party_includes_domain_whitelist
default: `{}`
If third party includes are disabled, you can also explicitly provide a whitelist of allowed third party domains.
```lua
require("ledge").create_handler({
esi_disable_third_party_includes = true,
esi_third_party_includes_domain_whitelist = {
["example.com"] = true,
}
}):run()
```
Hostnames are given as the table key with a truthy value, for O(1) lookup.
*Note; This behaviour was introduced in v2.2*
[Back to TOC](#handler-configuration-options)
#### advertise_ledge
default `true`
If set to false, disables advertising the software name and version, e.g. `(ledge/2.01)` from the `Via` response header.
[Back to TOC](#handler-configuration-options)
### Events
* [after_cache_read](#after_cache_read)
* [before_upstream_connect](#before_upstream_connect)
* [before_upstream_request](#before_upstream_request)
* [before_esi_include_request](#before_esi_include_request)
* [after_upstream_request](#after_upstream_request)
* [before_save](#before_save)
* [before_serve](#before_serve)
* [before_save_revalidation_data](#before_save_revalidation_data)
* [before_vary_selection](#before_vary_selection)
#### after_cache_read
syntax: `bind("after_cache_read", function(res) -- end)`
params: `res`. The cached response table.
Fires directly after the response was successfully loaded from cache.
The `res` table given contains:
* `res.header` the table of case-insensitive HTTP response headers
* `res.status` the HTTP response status code
*Note; there are other fields and methods attached, but it is strongly advised to never adjust anything other than the above*
[Back to TOC](#events)
#### before_upstream_connect
syntax: `bind("before_upstream_connect", function(handler) -- end)`
params: `handler`. The current handler instance.
Fires before the default `handler.upstream_client` is created, allowing a pre-connected HTTP client to be externally provided. The client must be API compatible with [lua-resty-http](https://github.com/pintsized/lua-resty-http). For example, using [lua-resty-upstream](https://github.com/hamishforbes/lua-resty-upstream) for load balancing.
[Back to TOC](#events)
#### before_upstream_request
syntax: `bind("before_upstream_request", function(req_params) -- end)`
params: `req_params`. The table of request params about to send to the [request](https://github.com/pintsized/lua-resty-http#request) method.
Fires when about to perform an upstream request.
[Back to TOC](#events)
#### before_esi_include_request
syntax: `bind("before_esi_include_request", function(req_params) -- end)`
params: `req_params`. The table of request params about to be used for an ESI include, via the [request](https://github.com/pintsized/lua-resty-http#request) method.
Fires when about to perform a HTTP request on behalf of an ESI include instruction.
[Back to TOC](#events)
#### after_upstream_request
syntax: `bind("after_upstream_request", function(res) -- end)`
params: `res` The response table.
Fires when the status / headers have been fetched, but before the body is stored. Typically used to override cache headers before we decide what to do with this response.
The `res` table given contains:
* `res.header` the table of case-insensitive HTTP response headers
* `res.status` the HTTP response status code
*Note; there are other fields and methods attached, but it is strongly advised to never adjust anything other than the above*
*Note: unlike `before_save` below, this fires for all fetched content, not just cacheable content.*
[Back to TOC](#events)
#### before_save
syntax: `bind("before_save", function(res) -- end)`
params: `res` The response table.
Fires when we're about to save the response.
The `res` table given contains:
* `res.header` the table of case-insensitive HTTP response headers
* `res.status` the HTTP response status code
*Note; there are other fields and methods attached, but it is strongly advised to never adjust anything other than the above*
[Back to TOC](#events)
#### before_serve
syntax: `bind("before_serve", function(res) -- end)`
params: `res` The `ledge.response` object.
Fires when we're about to serve. Often used to modify downstream headers.
The `res` table given contains:
* `res.header` the table of case-insensitive HTTP response headers
* `res.status` the HTTP response status code
*Note; there are other fields and methods attached, but it is strongly advised to never adjust anything other than the above*
[Back to TOC](#events)
#### before_save_revalidation_data
syntax: `bind("before_save_revalidation_data", function(reval_params, reval_headers) -- end)`
params: `reval_params`. Table of revalidation params.
params: `reval_headers`. Table of revalidation HTTP headers.
Fires when a background revalidation is triggered or when cache is being saved. Allows for modifying the headers and parameters (such as connection parameters) which are inherited by the background revalidation.
The `reval_params` are values derived from the current running configuration for:
* server_addr
* server_port
* scheme
* uri
* connect_timeout
* read_timeout
* ssl_server_name
* ssl_verify
[Back to TOC](#events)
#### before_vary_selection
syntax: `bind("before_vary_selection", function(vary_key) -- end)`
params: `vary_key` A table of selecting headers
Fires when we're about to generate the vary key, used to select the correct cache representation.
The `vary_key` table is a hash of header field names (lowercase) to values.
A field name which exists in the Vary response header but does not exist in the current request header will have a value of `ngx.null`.
```
Request Headers:
Accept-Encoding: gzip
X-Test: abc
X-test: def
Response Headers:
Vary: Accept-Encoding, X-Test
Vary: X-Foo
vary_key table:
{
["accept-encoding"] = "gzip",
["x-test"] = "abc,def",
["x-foo"] = ngx.null
}
```
[Back to TOC](#events)
## Administration
### X-Cache
Ledge adds the non-standard `X-Cache` header, familiar to users of other caches. It indicates simply `HIT` or `MISS` and the host name in question, preserving upstream values when more than one cache server is in play.
If a resource is considered not cacheable, the `X-Cache` header will not be present in the response.
For example:
* `X-Cache: HIT from ledge.tld` *A cache hit, with no (known) cache layer upstream.*
* `X-Cache: HIT from ledge.tld, HIT from proxy.upstream.tld` *A cache hit, also hit upstream.*
* `X-Cache: MISS from ledge.tld, HIT from proxy.upstream.tld` *A cache miss, but hit upstream.*
* `X-Cache: MISS from ledge.tld, MISS from proxy.upstream.tld` *Regenerated at the origin.*
[Back to TOC](#table-of-contents)
### Logging
It's often useful to add some extra headers to your Nginx logs, for example
```
log_format ledge '$remote_addr - $remote_user [$time_local] '
'"$request" $status $body_bytes_sent '
'"$http_referer" "$http_user_agent" '
'"Cache:$sent_http_x_cache" "Age:$sent_http_age" "Via:$sent_http_via"'
;
access_log /var/log/nginx/access_log ledge;
```
Will give log lines such as:
```
192.168.59.3 - - [23/May/2016:22:22:18 +0000] "GET /x/y/z HTTP/1.1" 200 57840 "-" "curl/7.37.1""Cache:HIT from 159e8241f519:8080" "Age:724"
```
[Back to TOC](#table-of-contents)
### Managing Qless
Ledge uses [lua-resty-qless](https://github.com/pintsized/lua-resty-qless) to schedule and process background tasks, which are stored in Redis.
Jobs are scheduled for background revalidation requests as well as wildcard PURGE requests, but most importantly for garbage collection of replaced body entities.
That is, it's very important that jobs are being run properly and in a timely fashion.
Installing the [web user interface](https://github.com/hamishforbes/lua-resty-qless-web) can be very helpful to check this.
You may also wish to tweak the [qless job history](https://github.com/pintsized/lua-resty-qless#configuration-options) settings if it takes up too much space.
[Back to TOC](#table-of-contents)
## Author
James Hurst
## Licence
This module is licensed under the 2-clause BSD license.
Copyright (c) James Hurst
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
================================================
FILE: dist.ini
================================================
name=ledge
abstract=An RFC compliant and ESI capable HTTP cache for Nginx / OpenResty, backed by Redis
author=James Hurst, Hamish Forbes
is_original=yes
license=2bsd
lib_dir=lib
repo_link=https://github.com/pintsized/ledge
main_module=lib/ledge.lua
requires = pintsized/lua-resty-http >= 0.11, pintsized/lua-resty-redis-connector >= 0.06, pintsized/lua-resty-qless >= 0.11, p0pr0ck5/lua-resty-cookie >= 0.01, hamishforbes/lua-ffi-zlib >= 0.3.0
================================================
FILE: docker/tests/docker-compose.yml
================================================
version: '3'
services:
runner:
image: "ledgetech/test-runner:latest"
volumes:
- ../../:/code
# Use this to mount any local Lua dependencies, overriding
# published versions
- ${EXTLIB-../../lib}:/code/extlib
environment:
- TEST_FILE
command: /bin/bash -c "TEST_LEDGE_REDIS_HOST=redis make coverage"
working_dir: /code
depends_on:
- redis
redis:
image: "redis:alpine"
================================================
FILE: lib/ledge/background.lua
================================================
local require = require
local math_ceil = math.ceil
local qless = require("resty.qless")
local _M = {
_VERSION = "2.3.0",
}
-- Enqueues a background job on the named qless queue, using the module-wide
-- Redis connection factory from the main ledge module.
--
-- Returns a descriptor table { jid, klass, options } on success, or a falsy
-- value plus an error string on failure.
local function put_background_job( queue, klass, data, options)
    local client = qless.new({
        get_redis_client = require("ledge").create_qless_connection
    })

    -- A caller-supplied (i.e. non random) jid will overwrite any existing
    -- job sharing that jid. If such a job is currently mid-flight we
    -- silently refuse to enqueue, rather than clobber it.
    local jid = options.jid
    if jid then
        local current = client.jobs:get(jid)
        if current and current.state == "running" then
            return nil, "Job with the same jid is currently running"
        end
    end

    -- Put the job, then release the connection regardless of outcome
    local new_jid, err = client.queues[queue]:put(klass, data, options)
    client:redis_close()

    if not new_jid then
        return new_jid, err
    end

    return {
        jid = new_jid,
        klass = klass,
        options = options,
    }
end
_M.put_background_job = put_background_job
-- Works out how long (in seconds) to wait before garbage collecting an
-- entity: the time a client reading at the minimum permitted download rate
-- (kbps) would need to finish the entity, rounded up, plus one second of
-- slack for latency.
local function gc_wait(entity_size, minimum_download_rate)
    -- kbps -> bytes per second (1024 bits / 8 bits-per-byte = 128)
    local bytes_per_sec = minimum_download_rate * 128
    local download_secs = entity_size / bytes_per_sec
    return 1 + math_ceil(download_secs)
end
_M.gc_wait = gc_wait
-- Public module table for ledge.background
return _M
================================================
FILE: lib/ledge/cache_key.lua
================================================
local ipairs, next, type, pcall, setmetatable =
ipairs, next, type, pcall, setmetatable
local str_lower = string.lower
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local ngx_var = ngx.var
local ngx_null = ngx.null
local tbl_insert = table.insert
local tbl_concat = table.concat
local tbl_sort = table.sort
local req_args_sorted = require("ledge.request").args_sorted
local req_default_args = require("ledge.request").default_args
local get_fixed_field_metatable_proxy =
require("ledge.util").mt.get_fixed_field_metatable_proxy
local http_headers = require("resty.http_headers")
local _M = {
_VERSION = "2.3.0",
}
-- Generates the root cache key from the configured key spec. The default
-- spec produces keys like:
--   ledge:cache:http:example.com:/about:p=3&q=searchterms
--
-- @param key_spec table List of fields: "scheme", "host", "port", "uri",
--        "args", or custom functions returning a string
-- @param max_args number Max number of URI args passed to req_args_sorted
-- @return string Colon-delimited root key
local function generate_root_key(key_spec, max_args)
    -- If key_spec is empty, provide a default
    if not key_spec or not next(key_spec) then
        key_spec = {
            "scheme",
            "host",
            "uri",
            "args",
        }
    end

    -- All keys begin "ledge:cache"
    local key = {
        "ledge",
        "cache",
    }

    for _, field in ipairs(key_spec) do
        if field == "scheme" then
            tbl_insert(key, ngx_var.scheme)
        elseif field == "host" then
            tbl_insert(key, ngx_var.host)
        elseif field == "port" then
            tbl_insert(key, ngx_var.server_port)
        elseif field == "uri" then
            tbl_insert(key, ngx_var.uri)
        elseif field == "args" then
            -- Sorted args give a canonical key regardless of arg order
            tbl_insert(
                key,
                req_args_sorted(max_args) or req_default_args()
            )
        elseif type(field) == "function" then
            -- Custom field: run it protected; it must return a string
            local ok, res = pcall(field)
            if not ok then
                ngx_log(ngx_ERR,
                    "error in function supplied to cache_key_spec: ", res
                )
            elseif type(res) ~= "string" then
                ngx_log(ngx_ERR,
                    "functions supplied to cache_key_spec must " ..
                    "return a string"
                )
            else
                tbl_insert(key, res)
            end
        end
    end

    return tbl_concat(key, ":")
end
_M.generate_root_key = generate_root_key
-- Reads the list of vary headers for the given root key from redis.
--
-- @param redis Connected redis client
-- @param root_key string Root cache key
-- @return sorted table of vary header names, or nil, err
local function read_vary_spec(redis, root_key)
    if not redis or not next(redis) then
        return nil, "Redis required"
    end
    if not root_key then
        return nil, "Root key required"
    end

    local res, err = redis:smembers(root_key.."::vary")
    if err then
        return nil, err
    end

    -- Sort for a stable spec, so comparisons and vary key generation are
    -- deterministic regardless of redis set ordering.
    -- (Uses the module-local tbl_sort for consistency with the rest of
    -- this file, which localizes all stdlib functions.)
    tbl_sort(res)
    return res
end
_M.read_vary_spec = read_vary_spec
-- Compares two vary specs (lists of header names).
-- Returns true when both are nil/empty, or when every entry of spec_a is
-- also present somewhere in spec_b. Returns false otherwise.
local function vary_compare(spec_a, spec_b)
    local a_empty = (not spec_a or not next(spec_a))
    local b_empty = (not spec_b or not next(spec_b))

    if a_empty then
        -- Specs match only if b is also nil or empty
        return b_empty
    end

    if b_empty then
        -- spec_a has entries but spec_b does not
        return false
    end

    -- Every entry of spec_a must appear in spec_b
    for _, needle in ipairs(spec_a) do
        local found = false
        for _, candidate in ipairs(spec_b) do
            if candidate == needle then
                found = true
                break
            end
        end
        if not found then
            return false
        end
    end

    return true
end
_M.vary_compare = vary_compare
-- Generates the vary key for a request from the given vary spec. The key
-- is a sorted "header:value:header:value" string, lowercased. Spec
-- entries with no matching request header are tracked internally as
-- ngx.null but omitted from the final key.
--
-- @param vary_spec table List of header names to vary on
-- @param callback function Optional user callback which may modify the
--        (case-insensitive) vary key table before serialisation
-- @param headers table Optional request headers; defaults to
--        ngx.req.get_headers()
-- @return string The vary key ("" when there is nothing to vary on)
local function generate_vary_key(vary_spec, callback, headers)
    -- Case-insensitive table, since header names can arrive in any case
    local vary_key = http_headers.new()

    if vary_spec and next(vary_spec) then
        headers = headers or ngx.req.get_headers()

        for _, h in ipairs(vary_spec) do
            local v = headers[h]
            -- Repeated headers arrive as a table; flatten them
            if type(v) == "table" then
                v = tbl_concat(v, ",")
            end
            -- ngx.null represents a key which was in the spec
            -- but has no matching request header
            vary_key[h] = v or ngx_null
        end
    end

    -- Callback allows user to modify the key
    if type(callback) == "function" then
        callback(vary_key)
    end

    if not next(vary_key) then
        return ""
    end

    -- Extract keys with real (non-null) values and sort them for a
    -- canonical ordering
    local keys = {}
    for k,v in pairs(vary_key) do
        if v ~= ngx_null then
            tbl_insert(keys, k)
        end
    end
    tbl_sort(keys)

    -- Convert hash table to flat array: key, value, key, value, ...
    local t = {}
    local i = 1
    for _, k in ipairs(keys) do
        t[i] = k
        t[i + 1] = vary_key[k]
        i = i + 2
    end

    return str_lower(tbl_concat(t, ":"))
end
_M.generate_vary_key = generate_vary_key
-- Builds the chain of redis keys for one representation, excluding the
-- body entity keys. The per-representation keys are iterable; bookkeeping
-- keys (root, full, vary, repset, fetching_lock, vary_spec) are hidden in
-- the metatable proxy so iterators do not see them.
local function key_chain(root_key, vary_key, vary_spec)
    if not root_key then return nil, "Missing root key" end
    if not vary_key then return nil, "Missing vary key" end
    if not vary_spec then return nil, "Missing vary_spec" end

    local full_key = root_key .. "#" .. vary_key

    local visible_keys = {
        -- hash: cache key metadata
        main = full_key .. "::main",
        -- sorted set: current entities score with sizes
        entities = full_key .. "::entities",
        -- hash: response headers
        headers = full_key .. "::headers",
        -- hash: request headers for revalidation
        reval_params = full_key .. "::reval_params",
        -- hash: request params for revalidation
        reval_req_headers = full_key .. "::reval_req_headers",
    }

    local hidden_keys = {
        -- Not actual keys, but useful to keep around
        root = root_key,
        full = full_key,
        vary_spec = vary_spec,
        -- set: headers upon which to vary
        vary = root_key .. "::vary",
        -- set: representations for this root key
        repset = root_key .. "::repset",
        -- Lock key for collapsed forwarding
        fetching_lock = full_key .. "::fetching",
    }

    return setmetatable(
        visible_keys,
        get_fixed_field_metatable_proxy(hidden_keys)
    )
end
_M.key_chain = key_chain
-- Ensures the representation set only includes keys which actually exist,
-- preventing the set from growing perpetually with unique variations.
-- This only runs on the slow path at save time so should be ok?
-- TODO use scan here incase the set is pathologically huge?
-- Has to be able to run in a transaction so maybe a housekeeping qless job?
--
-- @param redis Connected redis client
-- @param repset string The repset key for the root key
-- @return true on success, or nil, err
local function clean_repset(redis, repset)
    local clean = [[
    local repset = KEYS[1]
    local reps = redis.call("SMEMBERS", repset)
    for _, rep in ipairs(reps) do
        if redis.call("EXISTS", rep.."::main") == 0 then
            redis.call("SREM", repset, rep)
        end
    end
    ]]
    local res, err = redis:eval(clean, 1, repset)

    -- The script has no return statement, so a successful EVAL yields a
    -- null reply (surfaced as ngx.null) — only an actual error counts as
    -- failure. (Previously the null reply was mistaken for failure, so
    -- this function returned nil, nil even when the cleanup ran fine.)
    if err then
        return nil, err
    end

    return true
end
-- Persists the vary metadata for a key chain: the set of vary headers and
-- the set of known representations (repset) for the root key, both with
-- the given TTL. Dead entries are then pruned from the repset.
-- Redis errors are logged but do not abort the remaining steps.
--
-- NOTE(review): vary_spec values are lowercased in place, mutating the
-- table held by the key chain.
--
-- @param redis Connected redis client
-- @param key_chain table As returned by key_chain()
-- @param ttl number Expiry in seconds
-- @return true on success, or nil, err on invalid arguments
local function save_key_chain(redis, key_chain, ttl)
    if not redis then
        return nil, "Redis required"
    end
    if type(key_chain) ~= "table" or not next(key_chain) then
        return nil, "Key chain required"
    end
    if not tonumber(ttl) then
        return nil, "TTL must be a number"
    end

    -- Delete the current set of vary headers
    local _, e = redis:del(key_chain.vary)
    if e then ngx_log(ngx_ERR, e) end

    local vary_spec = key_chain.vary_spec
    if next(vary_spec) then
        -- Always lowercase all vary fields
        -- key_chain.vary is a set so will deduplicate for us
        for i,v in ipairs(vary_spec) do
            vary_spec[i] = str_lower(v)
        end

        -- `unpack` here is the Lua 5.1 global
        local _, e = redis:sadd(key_chain.vary, unpack(vary_spec))
        if e then ngx_log(ngx_ERR, e) end

        local _, e = redis:expire(key_chain.vary, ttl)
        if e then ngx_log(ngx_ERR, e) end
    end

    -- Add this representation to the set
    local _, e = redis:sadd(key_chain.repset, key_chain.full)
    if e then ngx_log(ngx_ERR, e) end

    local _, e = redis:expire(key_chain.repset, ttl)
    if e then ngx_log(ngx_ERR, e) end

    local _, e = clean_repset(redis, key_chain.repset)
    if e then ngx_log(ngx_ERR, e) end

    return true
end
_M.save_key_chain = save_key_chain
return _M
================================================
FILE: lib/ledge/collapse.lua
================================================
local _M = {
_VERSION = "2.3.0",
}
-- Attempts to set a lock key in redis. The lock will expire after
-- the expiry value if it is not cleared (i.e. in case of errors).
-- Returns true if the lock was acquired, false if the lock already
-- exists, and nil, err in case of failure.
--
-- @param redis Connected redis client
-- @param lock_key string Key to lock on
-- @param timeout number Lock expiry in milliseconds (used with PSETEX)
local function acquire_lock(redis, lock_key, timeout)
    -- We use a Lua script to emulate SETNEX (set if not exists with expiry).
    -- This avoids a race window between the GET / SETEX.
    -- Params: key, expiry
    -- Return: OK or BUSY
    local SETNEX = [[
    local lock = redis.call("GET", KEYS[1])
    if not lock then
        return redis.call("PSETEX", KEYS[1], ARGV[1], "locked")
    else
        return "BUSY"
    end
    ]]

    local res, err = redis:eval(SETNEX, 1, lock_key, timeout)
    if not res then -- Lua script failed
        return nil, err
    elseif res == "OK" then -- We have the lock
        return true
    elseif res == "BUSY" then -- Lock is busy
        return false
    end

    -- Previously any other reply fell off the end here, returning a bare
    -- nil that was indistinguishable from an error with no message
    return nil, "unexpected response from redis: " .. tostring(res)
end
_M.acquire_lock = acquire_lock
return _M
================================================
FILE: lib/ledge/esi/processor_1_0.lua
================================================
local http = require "resty.http"
local cookie = require "resty.cookie"
local tag_parser = require "ledge.esi.tag_parser"
local util = require "ledge.util"
local tostring, type, tonumber, next, unpack, pcall, setfenv, loadstring =
tostring, type, tonumber, next, unpack, pcall, setfenv, loadstring
local str_sub = string.sub
local str_byte = string.byte
-- TODO: Find places we can use str_find over ngx_re_find
local str_find = string.find
local tbl_concat = table.concat
local tbl_insert = table.insert
local co_yield = coroutine.yield
local co_wrap = util.coroutine.wrap
local ngx_re_gsub = ngx.re.gsub
local ngx_re_sub = ngx.re.sub
local ngx_re_match = ngx.re.match
local ngx_re_find = ngx.re.find
local ngx_req_get_headers = ngx.req.get_headers
local ngx_req_get_uri_args = ngx.req.get_uri_args
local ngx_flush = ngx.flush
local ngx_var = ngx.var
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local ngx_INFO = ngx.INFO
local get_fixed_field_metatable_proxy =
require("ledge.util").mt.get_fixed_field_metatable_proxy
local _M = {
_VERSION = "2.3.0",
}
-- Creates a new ESI 1.0 processor instance bound to the given handler.
-- @param handler The ledge handler driving this request
-- @return table Processor instance (fields fixed by the metatable proxy)
function _M.new(handler)
    return setmetatable({
        handler = handler,
        -- Capability token advertised for this processor
        token = "ESI/1.0",
    }, get_fixed_field_metatable_proxy(_M))
end
-- $1: variable name (e.g. QUERY_STRING)
-- $2: substructure key
-- $3: default value
-- $4: default value if quoted
local esi_var_pattern =
[[\$\(([A-Z_]+){?([a-zA-Z\.\-~_%0-9]*)}?\|?(?:([^\s\)']+)|'([^\')]+)')?\)]]
-- Evaluates a given ESI variable (captures from esi_var_pattern):
-- var[1]: variable name (e.g. QUERY_STRING), var[2]: substructure key,
-- var[3]: unquoted default, var[4]: quoted default.
-- Returns the evaluated value, or the default ("" when none given).
local function _esi_eval_var(var)
    -- Extract variables from capture results table
    local var_name = var[1] or ""

    local key = var[2]
    if key == "" then key = nil end

    local default = var[3]
    local default_quoted = var[4]
    -- NOTE(review): this redeclaration shadows the `default` above
    local default = default or default_quoted or ""

    if var_name == "QUERY_STRING" then
        if not key then
            -- We don't have a key so give them the whole string
            return ngx_var.args or default
        else
            -- Lookup the querystring component by key
            local value = ngx_req_get_uri_args()[key]
            if value then
                if type(value) == "table" then
                    -- Repeated args render as a comma separated list
                    return tbl_concat(value, ", ")
                else
                    return value
                end
            else
                return default
            end
        end
    elseif str_sub(var_name, 1, 5) == "HTTP_" then
        -- Evaluate request headers. Cookie and Accept-Language are special
        -- according to the spec.
        local header = str_sub(var_name, 6)
        if header == "COOKIE" then
            local cookies = ngx.ctx.__ledge_esi_cookies or cookie:new()
            local blacklist = ngx.ctx.__ledge_esi_vars_cookie_blacklist or {}
            if not next(blacklist) then
                -- No blacklist: serve the cookie (or whole header) directly
                if key then
                    return cookies:get(key) or default
                end
                return ngx_var.http_cookie or default
            end

            -- We have a blacklist to filter with
            if key then
                if not blacklist[key] then
                    return cookies:get(key) or default
                end
                return default
            else
                -- We need a full cookie string, with any blacklisted values removed
                local cookies = cookies:get_all()
                local value = {}
                for k, v in pairs(cookies) do
                    if not blacklist[k] then
                        tbl_insert(value, k .. "=" .. v)
                    end
                end
                -- NOTE(review): tbl_concat never returns nil, so the
                -- `or default` fallback here is unreachable
                return tbl_concat(value, "; ") or default
            end
        else
            local value = ngx_req_get_headers()[header]
            if not value then
                return default
            elseif header == "ACCEPT_LANGUAGE" and key then
                -- If we're a table (multiple Accept-Language headers), convert
                -- to string
                if type(value) == "table" then
                    value = tbl_concat(value, ", ")
                end
                -- Key is used as a regex: "true"/"false" per the ESI spec
                if ngx_re_find(value, key, "oj") then
                    return "true"
                else
                    return "false"
                end
            elseif type(value) == "table" then
                -- For normal repeated headers, numeric indexes are supported
                key = tonumber(key)
                if key then
                    -- We can index numerically (0 indexed)
                    return tostring(value[key + 1] or default)
                else
                    -- Without a numeric key, render as a comma separated list
                    return tbl_concat(value, ", ") or default
                end
            else
                return value
            end
        end
    elseif var_name == "ESI_ARGS" then
        local esi_args = ngx.ctx.__ledge_esi_args
        if not esi_args then
            -- No ESI args in request
            return default
        end

        if not key then
            -- __tostring metamethod turns these back into encoded URI args
            return tostring(esi_args)
        else
            local value = esi_args[key] or default
            if type(value) == "table" then
                return tbl_concat(value, ",")
            end
            return tostring(value)
        end
    else
        -- User-supplied custom variables (set on ngx.ctx by config)
        local custom_variables = ngx.ctx.__ledge_esi_custom_variables
        if next(custom_variables) then
            local var = custom_variables[var_name]
            if var then
                if key then
                    if type(var) == "table" then
                        return tostring(var[key] or default)
                    end
                else
                    if type(var) == "table" then
                        -- No sane way to stringify other tables
                        return default
                    else
                        -- We're a string
                        return var or default
                    end
                end
            end
        end
        return default
    end
end
-- Wrapper around _esi_eval_var which handles the RAW_ name prefix
-- (evaluate without escaping) and escapes ESI tags within results.
-- NOTE(review): the escaping branch below appears garbled/truncated in
-- this extract (the str_find condition and its gsub are incomplete) —
-- restore from upstream before editing.
local function esi_eval_var(var)
    local escape = true
    local var_name = var[1]

    -- If var name begins with RAW_ do not escape
    -- (bytes 82 65 87 95 == "RAW_")
    local b1, b2, b3, b4 = str_byte(var_name, 1, 4)
    if b1 == 82 and b2 == 65 and b3 == 87 and b4 == 95 then
        escape = false
        -- Strip the RAW_ prefix before evaluation
        var[1] = str_sub(var_name, 5, -1)
    end

    local res = _esi_eval_var(var)

    -- Always escape ESI tags in ESI variables
    if escape or str_find(res, "", ">", "soj")
    end
    return res
end
_M.esi_eval_var = esi_eval_var
-- Substitutes every ESI variable found in str, using cb (defaulting to
-- esi_eval_var) to evaluate each match of esi_var_pattern.
local function esi_replace_vars(str, cb)
    return ngx_re_gsub(str, esi_var_pattern, cb or esi_eval_var, "soj")
end
-- Evaluates a variable destined for an esi:when condition. Values which
-- can be read as numbers are returned as numbers; everything else is
-- wrapped in single quotes (with embedded single quotes backslash
-- escaped) ready for the condition lexer.
local function esi_eval_var_in_when_tag(var)
    local evaluated = esi_eval_var(var)

    local as_number = tonumber(evaluated)
    if as_number then
        return as_number
    end

    -- Strings must be enclosed in single quotes, so also backslash
    -- escape single quotes within the value
    return "'" .. ngx_re_gsub(evaluated, "'", "\\'", "oj") .. "'"
end
local function _esi_condition_lexer(condition)
-- ESI to Lua operators
local op_replacements = {
["!="] = "~=",
["|"] = " or ",
["&"] = " and ",
["||"] = " or ",
["&&"] = " and ",
["!"] = " not ",
}
-- Mapping of types to types they are allowed to follow
local lexer_rules = {
number = {
["nil"] = true,
["operator"] = true,
},
string = {
["nil"] = true,
["operator"] = true,
},
operator = {
["nil"] = true,
["number"] = true,
["string"] = true,
["operator"] = true,
},
}
-- $1: number
-- $2: string
-- $3: operator
local p =[[(\d+(?:\.\d+)?)|(?:'(.*?)(?=|>|<)]]
local ctx = {}
local tokens = {}
local prev_type
local expecting_pattern = false
repeat
local token, err = ngx_re_match(condition, p, "", ctx)
if err then ngx_log(ngx_ERR, err) end
if token then
local number, string, operator = token[1], token[2], token[3]
local token_type
if number then
token_type = "number"
tbl_insert(tokens, number)
elseif string then
token_type = "string"
-- Check to see if we're expecing a regex pattern
if expecting_pattern then
-- Extract the pattern and options
local re = ngx_re_match(
string,
[[\/(.*?)(?)]]
local after -- Will contain anything after the last closing choose tag
local chunk_has_conditionals = false
repeat
local choose, ch_before, ch_after = parser:next("esi:choose")
if choose and choose.closing then
chunk_has_conditionals = true
-- Anything before this choose should just be output
if ch_before then
tbl_insert(res, ch_before)
end
-- If this ends up being the last choose tag, content after this
-- should be output
if ch_after then
after = ch_after
end
local inner_parser = tag_parser.new(choose.contents)
local when_matched = false
local otherwise
repeat
local tag = inner_parser:next("esi:when|esi:otherwise")
if tag and tag.closing then
if tag.tagname == "esi:when" and when_matched == false then
local function process_when(m_when)
-- We only show the first matching branch, others
-- must be removed even if they also match.
if when_matched then return "" end
local condition = m_when[1]
if _esi_evaluate_condition(condition) then
when_matched = true
if ngx_re_find(tag.contents, "") then
-- recurse
evaluate_conditionals(
tag.contents,
res,
recursion + 1
)
else
tbl_insert(res, tag.contents)
end
end
return ""
end
local ok, err = ngx_re_sub(
tag.whole,
esi_when_pattern,
process_when
)
if not ok and err then ngx_log(ngx_ERR, err) end
-- Break after the first winning expression
elseif tag.tagname == "esi:otherwise" then
otherwise = tag.contents
end
end
until not tag
if not when_matched and otherwise then
if ngx_re_find(otherwise, "") then
-- recurse
evaluate_conditionals(otherwise, res, recursion + 1)
else
tbl_insert(res, otherwise)
end
end
end
until not choose
if after then
tbl_insert(res, after)
end
-- Variables inside ESI tags should be evaluated.
-- Return hint to eval this chunk
if not chunk_has_conditionals then
return chunk, false
else
return tbl_concat(res), true
end
end
-- Used in esi_process_vars_tag. Declared locally to avoid runtime closure
local function _esi_gsub_vars(m)
    -- m[2] is the content between the esi:vars tags
    return esi_replace_vars(m[2])
end

-- Replaces all variables inside esi:vars blocks, removing the tags
-- themselves.
-- NOTE(review): the regex literal below appears garbled in this extract
-- (the esi:vars open/close tag captures were stripped) — restore from
-- upstream before editing.
local function esi_process_vars_tag(chunk)
    -- Fast bail via plain find when there is no esi:vars tag at all
    if str_find(chunk, "esi:vars", 1, true) == nil then
        return chunk
    end

    -- For every esi:vars block, substitute any number of variables found.
    return ngx_re_gsub(chunk,
        "()(.*?)()",
        _esi_gsub_vars,
        "soj"
    )
end
_M.esi_process_vars_tag = esi_process_vars_tag
local function process_escaping(chunk, res, recursion)
if not recursion then recursion = 0 end
if not res then res = {} end
local parser = tag_parser.new(chunk)
local chunk_has_escaping = false
repeat
local tag, before, after = parser:next("!--esi")
if tag and tag.closing then
chunk_has_escaping = true
if before then
tbl_insert(res, before)
end
-- If there are more nested, recurse
if ngx_re_find(tag.contents, "
chunk = process_escaping(chunk)
-- Remove comments.
chunk = esi_process_comment_tags(chunk)
-- Remove '
chunk = esi_process_vars_tag(chunk)
-- Evaluate choose / when / otherwise conditions...
local chunk, should_eval = evaluate_conditionals(chunk)
-- Process ESI includes
-- Will yield content to the outer reader
esi_process_include_tags(self, chunk, esi_abort_flag, buffer_size, should_eval)
else
co_yield(chunk)
end
end
until not chunk
end)
-- Outer filter, which checks for an esi:abort_includes instruction,
-- so that we can handle accidental recursion.
repeat
local chunk, err = inner_reader(buffer_size)
if err then ngx_log(ngx_ERR, err) end
if chunk then
-- If we see an abort instruction, we set a flag to stop
-- further esi:includes.
if str_find(chunk, "",
"",
"soj"
)
end
co_yield(chunk)
end
until not chunk
end)
end
return _M
================================================
FILE: lib/ledge/esi/tag_parser.lua
================================================
local setmetatable, type =
setmetatable, type
local str_sub = string.sub
local ngx_re_find = ngx.re.find
local ngx_re_match = ngx.re.match
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local get_fixed_field_metatable_proxy =
require("ledge.util").mt.get_fixed_field_metatable_proxy
local _M = {
_VERSION = "2.3.0",
}
-- Creates a new tag parser over the given markup string.
-- @param content string Markup to parse
-- @param offset number Optional start position (defaults to 0)
-- @return table Parser instance
function _M.new(content, offset)
    return setmetatable({
        content = content,
        pos = (offset or 0),
        open_comments = 0,
    }, get_fixed_field_metatable_proxy(_M))
end
-- Returns the next tag matching tagname (or any esi tag when nil),
-- advancing the parser position past it.
-- @return tag (table from find_whole_tag), before (markup preceding the
--         tag), after (markup following it) — all nil when nothing found
function _M.next(self, tagname)
    local tag = self:find_whole_tag(tagname)
    local before, after

    if tag then
        before = str_sub(self.content, self.pos + 1, tag.opening.from - 1)

        if tag.closing then
            -- This is block level (with a closing tag)
            after = str_sub(self.content, tag.closing.to + 1)
            self.pos = tag.closing.to
        else
            -- Inline (no closing tag)
            after = str_sub(self.content, tag.opening.to + 1)
            self.pos = tag.opening.to
        end
    end

    return tag, before, after
end
-- Builds the regex matching the opening form of the given tag.
-- NOTE(review): the regex literals in these three pattern builders appear
-- garbled/truncated in this extract (angle-bracket sequences stripped) —
-- restore from upstream before editing.
function _M.open_pattern(tag)
    if tag == "!--esi" then
        return "<(!--esi)"
    else
        -- $1: the tag name, $2 the closing characters, e.g. "/>" or ">"
        return "<(" .. tag .. [[)(?:\s*(?:[a-z]+=\".+?(?]*?(?:\s*)(\/>|>)?]]
    end
end

-- Builds the regex matching the closing form of the given tag.
function _M.close_pattern(tag)
    if tag == "!--esi" then
        return "-->"
    else
        -- $1: the tag name
        return "(" .. tag .. ")\\s*>"
    end
end

-- Builds a regex matching either the opening or closing form of the tag,
-- used when scanning for nesting.
function _M.either_pattern(tag)
    if tag == "!--esi" then
        return "(?:<(!--esi)|(-->))"
    else
        -- $1: the tag name, $2 the closing characters, e.g. "/>" or ">"
        return [[<[\/]?(]] .. tag .. [[)(?:\s*(?:[a-z]+=\".+?(?]*?(?:\s*)(\s*\\/>|>)?]]
    end
end
-- Finds the next whole esi tag in the remaining markup (after self.pos),
-- accounting for nesting to find the correct matching closing tag.
-- @param tag Optional tag name pattern; defaults to any esi tag or the
--        comment syntax
-- @return table with opening (and, for block tags, closing) positions
--         made absolute, plus tagname / contents / whole — or nil when no
--         tag is found. A block tag whose closing tag has not arrived yet
--         is returned without `closing`, so callers can buffer.
function _M.find_whole_tag(self, tag)
    -- Only work on the remaining markup (after pos)
    local markup = str_sub(self.content, self.pos + 1)

    if not tag then
        -- Look for anything (including comment syntax)
        tag = "(?:!--esi)|(?:esi:[a-z]+)"
    end

    -- Find the first opening tag
    local opening_f, opening_t, err = ngx_re_find(markup, self.open_pattern(tag), "soj")
    if not opening_f then
        if err then ngx_log(ngx_ERR, err) end
        -- Nothing here
        return nil
    end

    -- We found an opening tag and has its position, but need to understand it better
    -- to handle comments and inline tags.
    local opening_m, err = ngx_re_match(
        str_sub(markup, opening_f, opening_t),
        self.open_pattern(tag), "soj"
    )
    if not opening_m then
        if err then ngx_log(ngx_ERR, err) end
        return nil
    end

    -- We return a table with opening tag positions (absolute), as well as
    -- tag contents etc. Block level tags will have "closing" data too.
    local ret = {
        opening = {
            from = opening_f + self.pos,
            to = opening_t + self.pos,
            tag = str_sub(markup, opening_f, opening_t),
        },
        tagname = opening_m[1],
        closing = nil,
        contents = nil,
    }

    -- If this is an inline (non-block) tag, we have everything
    -- ($2 capture ends "/>" for self-closing tags)
    if type(opening_m[2]) == "string" and str_sub(opening_m[2], -2) == "/>" then
        ret.whole = str_sub(markup, opening_f, opening_t)
        return ret
    end

    -- We must be block level, and could potentially be nesting
    local search = opening_t -- We search from after the opening tag
    local f, t, closing_f, closing_t
    local depth = 1
    local level = 1

    repeat
        -- keep looking for opening or closing tags
        f, t = ngx_re_find(str_sub(markup, search + 1), self.either_pattern(ret.tagname), "soj")
        if f and t then
            -- Move closing markers along
            closing_f = f
            closing_t = t

            -- Track current level and total depth: a nested opening tag
            -- increments, a closing tag decrements
            local tag = str_sub(markup, search + f, search + t)
            if ngx_re_find(tag, self.open_pattern(ret.tagname)) then
                depth = depth + 1
                level = level + 1
            elseif ngx_re_find(tag, self.close_pattern(ret.tagname)) then
                level = level - 1
            end

            -- Move search pos along
            search = search + t
        end
    until level == 0 or not f

    if closing_t and t then
        -- We have a complete block tag with the matching closing tag
        -- Make closing tag absolute
        closing_t = closing_t + search - t
        closing_f = closing_f + search - t

        ret.closing = {
            from = closing_f + self.pos,
            to = closing_t + self.pos,
            tag = str_sub(markup, closing_f, closing_t),
        }

        ret.contents = str_sub(markup, opening_t + 1, closing_f - 1)
        ret.whole = str_sub(markup, opening_f, closing_t)

        return ret
    else
        -- We have an opening block tag, but not the closing part. Return
        -- what we can as the filters will buffer until we find the rest.
        return ret
    end
end
return _M
================================================
FILE: lib/ledge/esi.lua
================================================
local h_util = require "ledge.header_util"
local type, tonumber = type, tonumber
local str_sub = string.sub
local str_find = string.find
local tbl_concat = table.concat
local tbl_insert = table.insert
local ngx_re_match = ngx.re.match
local ngx_req_get_headers = ngx.req.get_headers
local ngx_req_get_uri_args = ngx.req.get_uri_args
local ngx_encode_args = ngx.encode_args
local ngx_req_set_uri_args = ngx.req.set_uri_args
local ngx_var = ngx.var
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local _M = {
_VERSION = "2.3.0",
}
local esi_processors = {
["ESI"] = {
["1.0"] = require "ledge.esi.processor_1_0",
-- 2.0 = require ledge.esi.processor_2_0", -- for example
},
}
-- Splits an ESI token such as "ESI/1.0" into its processor name and
-- numeric version. Returns nothing when the token is nil or malformed.
function _M.split_esi_token(token)
    if not token then return end

    local m = ngx_re_match(
        token,
        [[^([A-Za-z0-9-_]+)\/(\d+\.?\d+)$]],
        "oj"
    )
    if not m then return end

    return m[1], tonumber(m[2])
end
-- Returns a space separated list of all installed processor tokens
-- (e.g. "ESI/1.0") for Surrogate-Capability advertisement.
function _M.esi_capabilities()
    local tokens = {}
    for processor_type, processors in pairs(esi_processors) do
        for version in pairs(processors) do
            tokens[#tokens + 1] = processor_type .. "/" .. version
        end
    end
    return table.concat(tokens, " ")
end
-- Returns a processor instance based on the response's Surrogate-Control
-- header, e.g. content="ESI/1.0" selects the ESI 1.0 processor.
-- Returns nothing when no suitable processor is installed.
-- NOTE(review): candidate versions are iterated with pairs(), so if more
-- than one version of a processor type were ever installed the selection
-- order would be nondeterministic.
function _M.choose_esi_processor(handler)
    local res = handler.response
    local res_surrogate_control = res.header["Surrogate-Control"]
    if res_surrogate_control then
        -- Get the token value (e.g. "ESI/1.0")
        local content_token =
            h_util.get_header_token(res_surrogate_control, "content")

        if content_token then
            local processor_token, version = _M.split_esi_token(content_token)

            if processor_token and version then
                -- Lookup the processor
                local processor_type = esi_processors[processor_token]

                if processor_type then
                    -- Use the first installed version able to satisfy the
                    -- requested version
                    for v,processor in pairs(processor_type) do
                        if tonumber(version) <= tonumber(v) then
                            return processor.new(handler)
                        end
                    end
                end
            end
        end
    end
end
-- Returns true if the response's Content-Type (ignoring any parameters
-- such as "; charset=utf-8") appears in the allowed_types list; returns
-- nothing otherwise.
function _M.is_allowed_content_type(res, allowed_types)
    if type(allowed_types) ~= "table" then return end

    local content_type = res.header["Content-Type"]
    if not content_type then return end

    -- Strip any parameters before comparing (loop invariant, so hoisted)
    local sep = string.find(content_type, ";")
    local bare_type = string.sub(content_type, 1, sep and (sep - 1))

    for _, allowed in ipairs(allowed_types) do
        if bare_type == allowed then
            return true
        end
    end
end
-- Returns true if we're allowed to delegate ESI processing to a
-- downstream surrogate for the current request, based on the request's
-- Surrogate-Capability header and the configured surrogate policy.
--
-- @param surrogates boolean (allow any) or table of permitted remote IPs
-- @param processor_token string e.g. "ESI/1.0" from Surrogate-Control
-- @return boolean
function _M.can_delegate_to_surrogate(surrogates, processor_token)
    local surrogate_capability = ngx_req_get_headers()["Surrogate-Capability"]
    if surrogate_capability then
        -- Surrogate-Capability: host.example.com="ESI/1.0"
        -- The device token is any RFC 7230 token charset
        local capability_token = h_util.get_header_token(
            surrogate_capability,
            "[!#\\$%&'\\*\\+\\-.\\^_`\\|~0-9a-zA-Z]+"
        )

        local capability_processor, capability_version =
            _M.split_esi_token(capability_token)

        if capability_processor and capability_version then
            local control_processor, control_version =
                _M.split_esi_token(processor_token)

            -- Delegate only when the surrogate supports at least the
            -- version demanded by Surrogate-Control
            if control_processor and control_version
                and control_processor == capability_processor
                and control_version <= capability_version then

                if type(surrogates) == "boolean" then
                    if surrogates == true then
                        return true
                    end
                elseif type(surrogates) == "table" then
                    -- Restricted to an explicit list of surrogate IPs
                    local remote_addr = ngx_var.remote_addr
                    if remote_addr then
                        for _, ip in ipairs(surrogates) do
                            if ip == remote_addr then
                                return true
                            end
                        end
                    end
                end
            end
        end
    end

    return false
end
-- Splits prefixed ESI args (e.g. "esi_foo=bar" with prefix "esi_") out of
-- the request URI args. Matched args are stashed on ngx.ctx for the ESI
-- processor, and the request args are rewritten without them (so they do
-- not affect the cache key or reach the origin).
function _M.filter_esi_args(handler)
    local config = handler.config
    local esi_args_prefix = config.esi_args_prefix
    if esi_args_prefix then
        local args = ngx_req_get_uri_args(config.max_uri_args)
        local esi_args = {}
        local has_esi_args = false
        local non_esi_args = {}

        for k,v in pairs(args) do
            -- TODO: optimise
            -- If we have the prefix, extract the suffix
            local m, err = ngx_re_match(
                k,
                "^" .. esi_args_prefix .. "(\\S+)",
                "oj"
            )
            if err then ngx_log(ngx_ERR, err) end

            if m and m[1] then
                has_esi_args = true
                esi_args[m[1]] = v
            else
                -- Otherwise, this is a normal arg
                non_esi_args[k] = v
            end
        end

        if has_esi_args then
            -- Add them to ctx to be read by the esi processor, along with a
            -- __tostring metamethod for the $(ESI_ARGS) string case, which
            -- re-encodes them (with the prefix restored) as URI args
            ngx.ctx.__ledge_esi_args = setmetatable(esi_args, {
                __tostring = function(t)
                    local args = {}
                    for k,v in pairs(t) do
                        args[esi_args_prefix .. k] = v
                    end
                    return ngx_encode_args(args)
                end
            })

            -- Set the request args to the ones left over
            ngx_req_set_uri_args(non_esi_args)
        end
    end
end
return _M
================================================
FILE: lib/ledge/gzip.lua
================================================
local co_yield = coroutine.yield
local co_wrap = require("ledge.util").coroutine.wrap
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local zlib = require("ffi-zlib")
local _M = {
_VERSION = "2.3.0",
}
-- Output handler passed to ffi-zlib: yields each produced chunk to the
-- wrapping coroutine reader.
local zlib_output = function(data)
    co_yield(data)
end
-- Wraps the given reader in a coroutine which gunzips its output,
-- yielding decompressed chunks downstream.
-- @param reader function Chunked body reader producing gzipped data
-- @return function Coroutine-wrapped reader of plain data
local function get_gzip_decoder(reader)
    return co_wrap(function(buffer_size)
        local ok, err = zlib.inflateGzip(reader, zlib_output, buffer_size)
        if not ok then
            -- Decode error is logged; the stream simply ends
            ngx_log(ngx_ERR, err)
        end

        -- zlib decides it is done when the stream is complete.
        -- Call reader() one more time to resume the next coroutine in the
        -- chain.
        reader(buffer_size)
    end)
end
_M.get_gzip_decoder = get_gzip_decoder
-- Wraps the given reader in a coroutine which gzips its output, yielding
-- compressed chunks downstream.
-- @param reader function Chunked body reader producing plain data
-- @return function Coroutine-wrapped reader of gzipped data
local function get_gzip_encoder(reader)
    return co_wrap(function(buffer_size)
        local ok, err = zlib.deflateGzip(reader, zlib_output, buffer_size)
        if not ok then
            -- Encode error is logged; the stream simply ends
            ngx_log(ngx_ERR, err)
        end

        -- zlib decides it is done when the stream is complete.
        -- Call reader() one more time to resume the next coroutine in the
        -- chain
        reader(buffer_size)
    end)
end
_M.get_gzip_encoder = get_gzip_encoder
return _M
================================================
FILE: lib/ledge/handler.lua
================================================
local setmetatable, tostring, tonumber, pcall, type, ipairs, pairs, next, error =
setmetatable, tostring, tonumber, pcall, type, ipairs, pairs, next, error
local ngx_req_get_method = ngx.req.get_method
local ngx_req_get_headers = ngx.req.get_headers
local ngx_req_http_version = ngx.req.http_version
local ngx_log = ngx.log
local ngx_WARN = ngx.WARN
local ngx_ERR = ngx.ERR
local ngx_INFO = ngx.INFO
local ngx_var = ngx.var
local ngx_null = ngx.null
local ngx_flush = ngx.flush
local ngx_print = ngx.print
local ngx_on_abort = ngx.on_abort
local ngx_md5 = ngx.md5
local ngx_time = ngx.time
local ngx_http_time = ngx.http_time
local ngx_parse_http_time = ngx.parse_http_time
local str_lower = string.lower
local str_len = string.len
local tbl_insert = table.insert
local tbl_concat = table.concat
local esi_capabilities = require("ledge.esi").esi_capabilities
local append_server_port = require("ledge.util").append_server_port
local ledge_cache_key = require("ledge.cache_key")
local req_relative_uri = require("ledge.request").relative_uri
local req_full_uri = require("ledge.request").full_uri
local put_background_job = require("ledge.background").put_background_job
local gc_wait = require("ledge.background").gc_wait
local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable
local get_fixed_field_metatable_proxy =
require("ledge.util").mt.get_fixed_field_metatable_proxy
local ledge = require("ledge")
local http = require("resty.http")
local http_headers = require("resty.http_headers")
local state_machine = require("ledge.state_machine")
local response = require("ledge.response")
local _M = {
_VERSION = "2.3.0",
}
-- Creates a new handler instance.
--
-- Config defaults are provided in the ledge module, and so instances
-- should always be created with ledge.create_handler(), not directly.
--
-- @param table The complete config table
-- @param table Event listener table (event name -> list of callbacks)
-- @return table Handler instance, or nil if no config table is provided
local function new(config, events)
    if not config then return nil, "config table expected" end
    -- Fix the config fields: unknown keys now error rather than silently
    -- creating new fields
    config = setmetatable(config, fixed_field_metatable)

    local self = setmetatable({
    -- public:
        config = config,
        events = events,
        upstream_client = {},

        -- Slots for composed objects (empty table == not yet connected)
        redis = {},
        redis_subscriber = {},
        storage = {},
        state_machine = {},
        range = {},
        response = {},
        error_response = {},
        esi_processor = {},
        client_validators = {},

        output_buffers_enabled = true,
        esi_scan_enabled = false,
        esi_process_enabled = false,

    -- private: memoized cache key state (see reset_cache_key)
        _root_key = "",
        _vary_key = ngx_null, -- empty string is not the same as not set
        _vary_spec = ngx_null, -- empty table is not the same as not set
        _cache_key_chain = {},
        _publish_key = "",
    }, get_fixed_field_metatable_proxy(_M))

    return self
end
_M.new = new
-- Runs the request through the state machine: installs the client abort
-- handler, establishes redis and storage connections, then fires "init".
-- @return the state machine result, or nil, err when a connection fails
local function run(self)
    -- Instantiate state machine
    local sm = state_machine.new(self)
    self.state_machine = sm

    -- Install the client abort handler
    local ok, err = ngx_on_abort(function()
        return self.state_machine:e "aborted"
    end)

    if not ok then
        -- Not fatal; e.g. on_abort cannot be set in some subrequest
        -- contexts
        ngx_log(ngx_WARN, "on_abort handler could not be set: " .. err)
    end

    -- Create Redis connection
    local redis, err = ledge.create_redis_connection()
    if not redis then
        return nil, "could not connect to redis, " .. tostring(err)
    else
        self.redis = redis
    end

    -- Create storage connection
    local config = self.config
    local storage, err = ledge.create_storage_connection(
        config.storage_driver,
        config.storage_driver_config
    )
    if not storage then
        return nil, "could not connect to storage, " .. tostring(err)
    else
        self.storage = storage
    end

    return sm:e "init"
end
_M.run = run
-- Bind a user callback to an event
--
-- Callbacks will be called in the order they are bound
--
-- @param table self
-- @param string event name
-- @param function callback
-- @return bool, string success, error
local function bind(self, event, callback)
    local listeners = self.events[event]
    if not listeners then
        -- Unknown event: log and report, but don't hard error
        local err = "no such event: " .. tostring(event)
        ngx_log(ngx_ERR, err)
        return nil, err
    end
    table.insert(listeners, callback)
    return true, nil
end
_M.bind = bind
-- Calls every registered callback for the event, in bind order, passing
-- any extra arguments through. Callback errors are caught (pcall) and
-- logged, never propagated. Hard errors if the event name is unknown.
local function emit(self, event, ...)
    local listeners = self.events[event]
    if not listeners then
        error("attempt to emit non existent event: " .. tostring(event), 2)
    end

    for _, listener in ipairs(listeners) do
        if type(listener) == "function" then
            local ok, err = pcall(listener, ...)
            if not ok then
                ngx_log(ngx_ERR,
                    "error in user callback for '", event, "': ", err)
            end
        end
    end

    return true
end
_M.emit = emit
-- Fetch the current entity id from the "entity" field of the main hash.
-- Returns nil when there is no key chain / main key, or nil plus any
-- Redis error when the field is missing or null.
function _M.entity_id(self, key_chain)
    if not (key_chain and key_chain.main) then
        return nil
    end

    local id, err = self.redis:hget(key_chain.main, "entity")
    if id and id ~= ngx_null then
        return id
    end
    return nil, err
end
-- Lazily generate and memoise the root cache key.
-- The empty string is the "not yet generated" sentinel.
local function root_key(self)
    local key = self._root_key
    if key == "" then
        key = ledge_cache_key.generate_root_key(
            self.config.cache_key_spec,
            self.config.max_uri_args
        )
        self._root_key = key
    end
    return key
end
_M.root_key = root_key
-- Lazily read and memoise the vary spec for the given root key.
-- ngx.null is the "not yet loaded" sentinel; returns false (without
-- caching) if the spec cannot be read from Redis.
local function vary_spec(self, root_key)
    if self._vary_spec ~= ngx_null then
        return self._vary_spec
    end

    local spec, err = ledge_cache_key.read_vary_spec(self.redis, root_key)
    if not spec then
        ngx_log(ngx_ERR, "Failed to read vary spec: ", err)
        return false
    end

    self._vary_spec = spec
    return spec
end
_M.vary_spec = vary_spec
-- Returns a closure which fires "before_vary_selection" for each
-- candidate vary key during vary key generation.
-- TODO: gunzip?
local function create_vary_key_callback(self)
    return function(vary_key)
        emit(self, "before_vary_selection", vary_key)
    end
end
_M.create_vary_key_callback = create_vary_key_callback
-- Lazily generate and memoise the vary key from the given vary spec.
-- ngx.null is the "not yet generated" sentinel.
local function vary_key(self, vary_spec)
    if self._vary_key ~= ngx_null then
        return self._vary_key
    end

    local key = ledge_cache_key.generate_vary_key(
        vary_spec,
        create_vary_key_callback(self)
    )
    self._vary_key = key
    return key
end
_M.vary_key = vary_key
-- Lazily build and memoise the full cache key chain.
-- Requires an established Redis connection (the vary spec is stored in
-- Redis); returns nil (plus an error where available) on failure.
local function cache_key_chain(self)
    if next(self._cache_key_chain) then
        return self._cache_key_chain
    end

    if not self.redis or not next(self.redis) then
        ngx_log(ngx_ERR, "Cannot get cache key without a redis connection")
        return nil
    end

    local rk = root_key(self)
    local vs = vary_spec(self, rk)
    local vk = vary_key(self, vs)

    local chain, err = ledge_cache_key.key_chain(rk, vk, vs)
    if not chain then
        return nil, err
    end

    self._cache_key_chain = chain
    return chain
end
_M.cache_key_chain = cache_key_chain
-- Return all memoised cache key state to its "unset" sentinel values.
local function reset_cache_key(self)
    self._root_key = ""          -- "" means "not yet generated"
    self._vary_key = ngx_null    -- ngx.null means "not yet generated"
    self._vary_spec = ngx_null   -- ngx.null means "not yet loaded"
    self._cache_key_chain = {}   -- empty table means "not yet built"
end
_M.reset_cache_key = reset_cache_key
-- Reset all memoised cache key state, then optionally pin a new
-- vary spec (skipping the Redis lookup next time it is needed).
local function set_vary_spec(self, vary_spec)
    reset_cache_key(self)
    if vary_spec then
        self._vary_spec = vary_spec
    end
end
_M.set_vary_spec = set_vary_spec
-- Attempt to load the cached response for the current request.
--
-- Returns:
--   * a response object on a cache HIT (with the storage body reader
--     attached when there is a body)
--   * an empty table on a MISS (including when the entity metadata exists
--     but the body has been evicted from storage)
--   * nil, err if the response object could not be created
-- On a metadata read error the state machine is transitioned directly to
-- "http_internal_server_error".
local function read_from_cache(self)
    local res, err = response.new(self)
    if not res then return nil, err end
    local ok, err = res:read()
    if err then
        -- Error, abort request
        ngx_log(ngx_ERR, "could not read response: ", err)
        return self.state_machine:e "http_internal_server_error"
    end
    if not ok then
        return {} -- MISS
    end
    if res.size > 0 then
        local storage = self.storage
        -- Check storage has the entity; if not, presume it has been evicted
        -- and schedule a background job to clean up the orphaned metadata
        if not storage:exists(res.entity_id) then
            local config = self.config
            put_background_job(
                "ledge_gc",
                "ledge.jobs.collect_entity",
                {
                    entity_id = res.entity_id,
                    storage_driver = config.storage_driver,
                    storage_driver_config = config.storage_driver_config,
                },
                {
                    -- Delay collection long enough for any in-flight
                    -- readers of the old entity to finish
                    delay = gc_wait(
                        res.size,
                        config.minimum_old_entity_download_rate
                    ),
                    tags = { "collect_entity" },
                    priority = 10,
                }
            )
            return {} -- MISS
        end
        -- Attach the storage reader so the body can be streamed out
        res:filter_body_reader("cache_body_reader", storage:get_reader(res))
    end
    emit(self, "after_cache_read", res)
    return res
end
_M.read_from_cache = read_from_cache
-- http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.5.1
-- Set of (lowercased) header names that must not be forwarded when
-- merging origin response headers into the cached/served response.
local hop_by_hop_headers = {
    ["connection"] = true,
    ["keep-alive"] = true,
    ["proxy-authenticate"] = true,
    ["proxy-authorization"] = true,
    ["te"] = true,
    ["trailers"] = true,
    ["transfer-encoding"] = true,
    ["upgrade"] = true,
    ["content-length"] = true, -- Not strictly hop-by-hop, but we
                               -- set dynamically downstream.
}
-- Fetches a resource from the origin server.
--
-- Connects (or reuses an already-installed) upstream HTTP client, forwards
-- the current request (advertising ESI Surrogate-Capability when enabled),
-- and returns a response object with the upstream body reader attached.
-- Connection and handshake failures are reported via res.status
-- (503, 524 on timeout, 525 on SSL failure) rather than as Lua errors.
local function fetch_from_origin(self)
    local res, err = response.new(self)
    if not res then return nil, err end
    -- Reject methods which have no corresponding ngx.HTTP_* constant
    local method = ngx['HTTP_' .. ngx_req_get_method()]
    if not method then
        res.status = ngx.HTTP_METHOD_NOT_IMPLEMENTED
        return res
    end
    emit(self, "before_upstream_connect", self)
    local config = self.config
    -- Only connect if no upstream client is already in place (a callback
    -- may have installed one during before_upstream_connect)
    if not next(self.upstream_client) then
        local httpc = http.new()
        httpc:set_timeouts(
            config.upstream_connect_timeout,
            config.upstream_send_timeout,
            config.upstream_read_timeout
        )
        local port = tonumber(config.upstream_port)
        local ok, err
        if port then
            ok, err = httpc:connect(config.upstream_host, port)
        else
            ok, err = httpc:connect(config.upstream_host)
        end
        if not ok then
            ngx_log(ngx_ERR, "upstream connection failed: ", err)
            if err == "timeout" then
                res.status = 524 -- upstream server timeout
            else
                res.status = 503
            end
            return res
        end
        if config.upstream_use_ssl == true then
            -- treat an empty ("") ssl_server_name as nil
            local ssl_server_name = config.upstream_ssl_server_name
            if type(ssl_server_name) ~= "string" or
                str_len(ssl_server_name) == 0 then
                ssl_server_name = nil
            end
            local ok, err = httpc:ssl_handshake(
                false,
                ssl_server_name,
                config.upstream_ssl_verify
            )
            if not ok then
                ngx_log(ngx_ERR, "ssl handshake failed: ", err)
                res.status = 525 -- SSL Handshake Failed
                return res
            end
        end
        self.upstream_client = httpc
    end
    local upstream_client = self.upstream_client
    -- Case insensitive headers so that we can safely manipulate them
    local headers = http_headers.new()
    for k,v in pairs(ngx_req_get_headers()) do
        headers[k] = v
    end
    -- Advertise ESI surrogate capabilities
    if config.esi_enabled then
        local capability_entry = self.config.visible_hostname .. '="'
            .. esi_capabilities() .. '"'
        local sc = headers["Surrogate-Capability"]
        if not sc then
            headers["Surrogate-Capability"] = capability_entry
        else
            -- Append ourselves to any existing capability list
            headers["Surrogate-Capability"] = sc .. ", " .. capability_entry
        end
    end
    -- Stream the client request body through to the upstream
    local client_body_reader, err =
        upstream_client:get_client_body_reader(config.buffer_size)
    if err then
        ngx_log(ngx_ERR, "error getting client body reader: ", err)
    end
    local req_params = {
        method = ngx_req_get_method(),
        path = req_relative_uri(),
        body = client_body_reader,
        headers = headers,
    }
    -- allow request params to be customised
    emit(self, "before_upstream_request", req_params)
    local origin, err = upstream_client:request(req_params)
    if not origin then
        ngx_log(ngx_ERR, err)
        res.status = 524
        return res
    end
    res.status = origin.status
    -- Merge end-to-end headers (hop-by-hop headers are dropped)
    local hop_by_hop_headers = hop_by_hop_headers
    for k,v in pairs(origin.headers) do
        if not hop_by_hop_headers[str_lower(k)] then
            res.header[k] = v
        end
    end
    -- May well be nil (we set to false if that's the case), but if present
    -- we bail on saving large bodies to memory nice and early.
    res.length = tonumber(origin.headers["Content-Length"]) or false
    res.has_body = origin.has_body
    res:filter_body_reader(
        "upstream_body_reader",
        origin.body_reader
    )
    if res.status < 500 then
        -- http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.18
        -- A received message that does not have a Date header field MUST be
        -- assigned one by the recipient if the message will be cached by that
        -- recipient
        if type(res.header["Date"]) ~= "string" or
            not ngx_parse_http_time(res.header["Date"]) then
            ngx_log(ngx_WARN,
                "Missing or invalid Date header from upstream, generating locally"
            )
            res.header["Date"] = ngx_http_time(ngx_time())
        end
    end
    -- A nice opportunity for post-fetch / pre-save work.
    emit(self, "after_upstream_request", res)
    return res
end
_M.fetch_from_origin = fetch_from_origin
-- Returns data required to perform a background revalidation for this current
-- request, as two tables; reval_params and reval_headers.
local function revalidation_data(self)
    -- Everything that a headless revalidation job would need to connect
    local config = self.config
    local reval_params = {
        server_addr = ngx_var.server_addr,
        server_port = ngx_var.server_port,
        scheme = ngx_var.scheme,
        uri = ngx_var.request_uri,
        connect_timeout = config.upstream_connect_timeout,
        send_timeout = config.upstream_send_timeout,
        read_timeout = config.upstream_read_timeout,
        keepalive_timeout = config.upstream_keepalive_timeout,
        keepalive_poolsize = config.upstream_keepalive_poolsize,
    }

    local req_headers = ngx_req_get_headers()

    -- By default we pass through Host, and Authorization and Cookie headers
    -- if present.
    local reval_headers = {
        host = req_headers["Host"],
    }
    for _, name in ipairs({ "Authorization", "Cookie" }) do
        if req_headers[name] then
            reval_headers[name] = req_headers[name]
        end
    end

    emit(self, "before_save_revalidation_data", reval_params, reval_headers)

    return reval_params, reval_headers
end
-- Schedule a background revalidation job for the item in key_chain.
--
-- When update_revalidation_data is truthy (a normal client request, as
-- opposed to a purge), the stored revalidation params and headers are
-- refreshed from the current request first, preserving their existing TTL
-- (or falling back to 3600s when the TTL cannot be determined).
-- Returns the qless job, or nil if the cache metadata has no "uri" field
-- (in which case the job cannot be identified and is aborted).
local function revalidate_in_background(self, key_chain, update_revalidation_data)
    local redis = self.redis
    -- Revalidation data is updated if this is a proper request, but not if
    -- it's a purge request.
    if update_revalidation_data then
        local reval_params, reval_headers = revalidation_data(self)
        local ttl, err = redis:ttl(key_chain.reval_params)
        if not ttl or ttl == ngx_null or ttl < 0 then
            if err then ngx_log(ngx_ERR, err) end
            ngx_log(ngx_INFO,
                "Could not determine expiry for revalidation params. " ..
                "Will fallback to 3600 seconds."
            )
            -- Arbitrarily expire these revalidation parameters in an hour.
            ttl = 3600
        end
        -- Delete and update reval params / request headers atomically
        -- inside a MULTI transaction
        local _, e
        _, e = redis:multi()
        if e then ngx_log(ngx_ERR, e) end
        _, e = redis:del(key_chain.reval_params)
        if e then ngx_log(ngx_ERR, e) end
        _, e = redis:hmset(key_chain.reval_params, reval_params)
        if e then ngx_log(ngx_ERR, e) end
        _, e = redis:expire(key_chain.reval_params, ttl)
        if e then ngx_log(ngx_ERR, e) end
        _, e = redis:del(key_chain.reval_req_headers)
        if e then ngx_log(ngx_ERR, e) end
        _, e = redis:hmset(key_chain.reval_req_headers, reval_headers)
        if e then ngx_log(ngx_ERR, e) end
        _, e = redis:expire(key_chain.reval_req_headers, ttl)
        if e then ngx_log(ngx_ERR, e) end
        local res, err = redis:exec()
        if not res or res == ngx_null then
            ngx_log(ngx_ERR, "Could not update revalidation params: ", err)
        end
    end
    -- The revalidation job is identified by the stored URI
    local uri, err = redis:hget(key_chain.main, "uri")
    if not uri or uri == ngx_null then
        if err then
            ngx_log(ngx_ERR, "Failed to get main key while revalidating: ", err)
        else
            ngx_log(ngx_WARN,
                "Cache key has no 'uri' field, aborting revalidation"
            )
        end
        return nil
    end
    -- Schedule the background job (immediately). jid is a function of the
    -- URI for automatic de-duping.
    return put_background_job(
        "ledge_revalidate",
        "ledge.jobs.revalidate",
        { key_chain = key_chain },
        {
            jid = ngx_md5("revalidate:" .. uri),
            tags = { "revalidate" },
            priority = 4,
        }
    )
end
_M.revalidate_in_background = revalidate_in_background
-- Starts a "revalidation" job but maybe for brand new cache. We pass the
-- current request's revalidation data through so that the job has meaninful
-- parameters to work with (rather than using stored metadata).
local function fetch_in_background(self)
local key_chain = cache_key_chain(self)
local reval_params, reval_headers = revalidation_data(self)
return put_background_job(
"ledge_revalidate",
"ledge.jobs.revalidate",
{
key_chain = key_chain,
reval_params = reval_params,
reval_headers = reval_headers,
},
{
jid = ngx_md5("revalidate:" .. req_full_uri()),
tags = { "revalidate" },
priority = 4,
}
)
end
_M.fetch_in_background = fetch_in_background
-- Save the response to cache.
--
-- Writes metadata and revalidation data to Redis inside a MULTI
-- transaction (WATCHing the main key so a concurrent save aborts ours),
-- and attaches a storage writer filter for the body. The transaction is
-- EXECed either immediately (bodyless responses) or from the storage
-- writer's success callback; on write failure it is DISCARDed. Any
-- previous entity is scheduled for background collection on success.
--
-- Fix: corrected the "greated" typo in the oversize error message.
--
-- @param table self handler instance
-- @param table res response to save
-- @return true on success, or nil plus an error message when the
--         response is not saved (it is still served)
local function save_to_cache(self, res)
    if not res then return nil, "no response to save" end
    emit(self, "before_save", res)
    -- Length is only set if there was a Content-Length header
    local length = res.length
    local storage = self.storage
    local max_size = storage:get_max_size()
    if length and length > max_size then
        -- We'll carry on serving, just not saving.
        return nil, "advertised length is greater than storage max size"
    end
    -- Watch the main key pointer. We abort the transaction if another request
    -- updates this key before we finish.
    local key_chain = cache_key_chain(self)
    local redis = self.redis
    redis:watch(key_chain.main)
    local repset_ttl = redis:ttl(key_chain.repset)
    -- We'll need to mark the old entity for expiration shortly, as reads
    -- could still be in progress. We need to know the previous entity keys
    -- and the size.
    local previous_entity_id = self:entity_id(key_chain)
    local previous_entity_size, err, gc_job_spec
    if previous_entity_id then
        previous_entity_size, err = redis:hget(key_chain.main, "size")
        if previous_entity_size == ngx_null then
            previous_entity_id = nil
            if err then
                ngx_log(ngx_ERR, err)
            end
        end
        -- Define GC job here, used later if required
        gc_job_spec = {
            "ledge_gc",
            "ledge.jobs.collect_entity",
            {
                entity_id = previous_entity_id,
                storage_driver = self.config.storage_driver,
                storage_driver_config = self.config.storage_driver_config,
            },
            {
                -- Delay collection long enough for in-flight reads of the
                -- old entity to finish
                delay = gc_wait(
                    previous_entity_size,
                    self.config.minimum_old_entity_download_rate
                ),
                tags = { "collect_entity" },
                priority = 10,
            }
        }
    end
    -- Start the transaction
    local ok, err = redis:multi()
    if not ok then ngx_log(ngx_ERR, err) end
    if previous_entity_id then
        local ok, err = redis:srem(key_chain.entities, previous_entity_id)
        if not ok then ngx_log(ngx_ERR, err) end
    end
    res.uri = req_full_uri()
    local keep_cache_for = self.config.keep_cache_for
    local ok, err = res:save(keep_cache_for)
    if not ok then ngx_log(ngx_ERR, err) end
    -- Set revalidation parameters from this request
    local reval_params, reval_headers = revalidation_data(self)
    local _, err = redis:del(key_chain.reval_params)
    if err then ngx_log(ngx_ERR, err) end
    _, err = redis:hmset(key_chain.reval_params, reval_params)
    if err then ngx_log(ngx_ERR, err) end
    _, err = redis:del(key_chain.reval_req_headers)
    if err then ngx_log(ngx_ERR, err) end
    _, err = redis:hmset(key_chain.reval_req_headers, reval_headers)
    if err then ngx_log(ngx_ERR, err) end
    local expiry = res:ttl() + keep_cache_for
    redis:expire(key_chain.reval_params, expiry)
    redis:expire(key_chain.reval_req_headers, expiry)
    -- repset and vary TTL should be the same as the longest living
    -- representation
    if repset_ttl < expiry then
        repset_ttl = expiry
    end
    -- Save updates to cache key
    ledge_cache_key.save_key_chain(redis, key_chain, repset_ttl)
    -- If we have a body, we need to attach the storage writer
    -- NOTE: res.has_body is false for known bodyless response types
    -- (e.g. HEAD) but may be true and of zero length (commonly 301 etc).
    if res.has_body then
        -- Storage callback for write success
        local function onsuccess(bytes_written)
            -- Update size in metadata
            local ok, e = redis:hset(key_chain.main, "size", bytes_written)
            if not ok or ok == ngx_null then ngx_log(ngx_ERR, e) end
            if bytes_written == 0 then
                -- Remove the entity as it wont exist
                ok, e = redis:srem(key_chain.entities, res.entity_id)
                if not ok or ok == ngx_null then ngx_log(ngx_ERR, e) end
                ok, e = redis:hdel(key_chain.main, "entity")
                if not ok or ok == ngx_null then ngx_log(ngx_ERR, e) end
            end
            ok, e = redis:exec()
            if not ok or ok == ngx_null then
                if e then
                    ngx_log(ngx_ERR, "failed to complete transaction: ", e)
                else
                    -- Transaction likely failed due to watch on main key
                    -- Tell storage to clean up too
                    ok, e = storage:delete(res.entity_id) -- luacheck: ignore ok
                    if e then
                        ngx_log(ngx_ERR, "failed to cleanup storage: ", e)
                    end
                end
            elseif previous_entity_id then
                -- Everything has completed and we have an old entity
                -- Schedule GC to clean it up
                put_background_job(unpack(gc_job_spec))
            end
        end
        -- Storage callback for write failure. We roll back our transaction.
        local function onfailure(reason)
            ngx_log(ngx_ERR, "storage failed to write: ", reason)
            local ok, e = redis:discard()
            if not ok or ok == ngx_null then ngx_log(ngx_ERR, e) end
        end
        -- Attach storage writer
        local ok, writer = pcall(storage.get_writer, storage,
            res,
            keep_cache_for,
            onsuccess,
            onfailure
        )
        if not ok then
            ngx_log(ngx_ERR, writer)
        else
            res:filter_body_reader("cache_body_writer", writer)
        end
    else
        -- No body and thus no storage filter
        -- We can run our transaction immediately
        local ok, e = redis:exec()
        if not ok or ok == ngx_null then
            ngx_log(ngx_ERR, "failed to complete transaction: ", e)
        elseif previous_entity_id then
            -- Everything has completed and we have an old entity
            -- Schedule GC to clean it up
            put_background_job(unpack(gc_job_spec))
        end
    end
    return true
end
_M.save_to_cache = save_to_cache
-- Remove a cache item (every key in its key chain) from Redis.
--
-- Schedules background collection of the stored entity body first, removes
-- this representation from the repset, and deletes the vary key too when
-- no other representations remain. Returns the result of the Redis DEL.
local function delete_from_cache(self, key_chain)
    local redis = self.redis
    -- Get entity_id if not already provided
    local entity_id = self:entity_id(key_chain)
    -- Schedule entity collection
    if entity_id then
        local config = self.config
        local size = redis:hget(key_chain.main, "size")
        put_background_job(
            "ledge_gc",
            "ledge.jobs.collect_entity",
            {
                entity_id = entity_id,
                storage_driver = config.storage_driver,
                storage_driver_config = config.storage_driver_config,
            },
            {
                -- Delay collection long enough for in-flight reads of
                -- this entity to finish
                delay = gc_wait(
                    size,
                    config.minimum_old_entity_download_rate
                ),
                tags = { "collect_entity" },
                priority = 10,
            }
        )
    end
    -- Remove this representation from the repset
    redis:srem(key_chain.repset, key_chain.full)
    -- Delete everything in the keychain
    local keys = {}
    for _, v in pairs(key_chain) do
        tbl_insert(keys, v)
    end
    -- If there are no more entries in the repset clean up the vary key too
    local exists = redis:exists(key_chain.repset)
    if exists == 0 then
        tbl_insert(keys, key_chain.vary)
    end
    return redis:del(unpack(keys))
end
_M.delete_from_cache = delete_from_cache
-- Resumes the reader coroutine and prints the data yielded. This could be
-- via a cache read, or a save via a fetch... the interface is uniform.
local function serve_body(self, res, buffer_size)
    local reader = res.body_reader
    -- HTTP/1.0 clients cannot be flushed mid-response
    local can_flush = ngx_req_http_version() >= 1.1
    local pending = 0

    while true do
        local chunk, err = reader(buffer_size)
        if err then ngx_log(ngx_ERR, err) end
        if not chunk then break end

        if self.output_buffers_enabled then
            local ok, print_err = ngx_print(chunk)
            if not ok then ngx_log(ngx_INFO, print_err) end

            -- Flush each full buffer, if we can
            pending = pending + #chunk
            if can_flush and pending >= buffer_size then
                local flushed, flush_err = ngx_flush(true)
                if not flushed then ngx_log(ngx_INFO, flush_err) end
                pending = 0
            end
        end
    end
end
-- Send the response downstream to the client.
--
-- Adds Via and X-Cache headers, fires "before_serve", copies the response
-- headers into ngx.header, streams the body (unless this is a HEAD
-- request), and finalises the response with ngx.eof(). Does nothing if
-- headers have already been sent.
local function serve(self)
    if not ngx.headers_sent then
        local res = self.response
        local name = append_server_port(self.config.visible_hostname)
        -- Via header
        local via = "1.1 " .. name
        if self.config.advertise_ledge then
            via = via .. " (ledge/" .. _M._VERSION .. ")"
        end
        -- Append upstream Via
        local res_via = res.header["Via"]
        if (res_via ~= nil) then
            -- Fix multiple upstream Via headers into list form
            if (type(res_via) == "table") then
                res.header["Via"] = via .. ", " .. tbl_concat(res_via, ", ")
            else
                res.header["Via"] = via .. ", " .. res_via
            end
        else
            res.header["Via"] = via
        end
        -- X-Cache header
        -- Don't set if this isn't a cacheable response. Set to MISS if we
        -- fetched (unless serving stale or disconnected).
        local state_history = self.state_machine.state_history
        local event_history = self.state_machine.event_history
        if not event_history["response_not_cacheable"] then
            local x_cache = "HIT from " .. name
            if not event_history["can_serve_disconnected"]
                and not event_history["can_serve_stale"]
                and state_history["fetching"] then
                x_cache = "MISS from " .. name
            end
            local res_x_cache = res.header["X-Cache"]
            if res_x_cache ~= nil then
                res.header["X-Cache"] = x_cache .. ", " .. res_x_cache
            else
                res.header["X-Cache"] = x_cache
            end
        end
        emit(self, "before_serve", res)
        if res.header then
            for k,v in pairs(res.header) do
                ngx.header[k] = v
            end
        end
        if res.body_reader and ngx_req_get_method() ~= "HEAD" then
            local buffer_size = self.config.buffer_size
            serve_body(self, res, buffer_size)
        end
        ngx.eof()
    end
end
_M.serve = serve
-- Add a Warning header to the current response, attributed to this
-- server's visible hostname (with its port appended where applicable).
local function add_warning(self, code)
    return self.response:add_warning(
        code,
        append_server_port(self.config.visible_hostname)
    )
end
_M.add_warning = add_warning
-- Lock the module table against accidental field additions / typos
return setmetatable(_M, fixed_field_metatable)
================================================
FILE: lib/ledge/header_util.lua
================================================
local type, tonumber, setmetatable =
type, tonumber, setmetatable
local ngx_re_match = ngx.re.match
local ngx_re_find = ngx.re.find
local tbl_concat = table.concat
local _M = {
_VERSION = "2.3.0"
}
local mt = {
__index = _M,
}
-- Returns true if the directive appears in the header field value.
-- Set without_token to true to only return bare directives - i.e.
-- directives appearing with no =value part.
function _M.header_has_directive(header, directive, without_token)
    if not header then
        return false
    end
    if type(header) == "table" then
        header = tbl_concat(header, ", ")
    end

    -- A bare directive must be followed by end-of-string or a comma;
    -- otherwise "=" (introducing a token value) is also accepted.
    local terminator = without_token and [[(?:$|,)]] or [[(?:$|=|,)]]
    local pattern = [[(?:\s*|,?)(]] .. directive .. [[)\s*]] .. terminator

    return ngx_re_find(header, pattern, "ioj") ~= nil
end
-- Returns the string value assigned to a directive in the header
-- (e.g. stale-while-revalidate="60" -> "60"), or nil when the directive
-- is absent or carries no parseable token value.
function _M.get_header_token(header, directive)
    if not _M.header_has_directive(header, directive) then
        return nil
    end

    if type(header) == "table" then
        header = tbl_concat(header, ", ")
    end

    -- Want the string value from a token
    local m = ngx_re_match(
        header,
        directive .. [[="?([a-z0-9_~!#%&/',`\$\*\+\-\|\^\.]+)"?]],
        "ioj"
    )
    if m then
        return m[1]
    end
    return nil
end
-- Returns the numeric value assigned to a directive in the header
-- (e.g. "max-age=60" -> 60), or nil when the directive is absent or its
-- value is not a simple integer token.
-- Fix: added an explicit `return nil` fall-through for consistency with
-- get_header_token (previously returned no values at all on failure,
-- which differs under select('#', ...)).
function _M.get_numeric_header_token(header, directive)
    if _M.header_has_directive(header, directive) then
        if type(header) == "table" then header = tbl_concat(header, ", ") end
        -- Want the numeric value from a token
        local value = ngx_re_match(
            header,
            directive .. [[="?(\d+)"?]], "ioj"
        )
        if value ~= nil then
            return tonumber(value[1])
        end
    end
    return nil
end
return setmetatable(_M, mt)
================================================
FILE: lib/ledge/jobs/collect_entity.lua
================================================
local tostring = tostring
local ngx_null = ngx.null
local create_storage_connection = require("ledge").create_storage_connection
local _M = {
_VERSION = "2.3.0",
}
-- Cleans up expired items and keeps track of memory usage.
function _M.perform(job)
    local storage, err = create_storage_connection(
        job.data.storage_driver,
        job.data.storage_driver_config
    )
    if not storage then
        return nil, "job-error", "could not connect to storage driver: "..tostring(err)
    end

    -- Delete the entity body, then release the connection regardless
    local deleted, delete_err = storage:delete(job.data.entity_id)
    storage:close()

    if deleted == nil or deleted == ngx_null then
        return nil, "job-error", tostring(delete_err)
    end
end
return _M
================================================
FILE: lib/ledge/jobs/purge.lua
================================================
local ipairs, tonumber = ipairs, tonumber
local ngx_log = ngx.log
local ngx_DEBUG = ngx.DEBUG
local ngx_ERR = ngx.ERR
local ngx_null = ngx.null
local purge = require("ledge.purge").purge
local create_redis_slave_connection = require("ledge").create_redis_slave_connection
local close_redis_connection = require("ledge").close_redis_connection
local _M = {
_VERSION = "2.3.0",
}
-- Scans the keyspace for keys which match, and expires them. We do this against
-- the slave Redis instance if available.
--
-- job.data carries: repset (SCAN match pattern), keyspace_scan_count,
-- purge_mode, storage_driver and storage_driver_config.
-- Returns nil plus a qless error tuple ("job-error"/"redis-error", msg)
-- on failure; nothing on success.
function _M.perform(job)
    if not job.redis then
        return nil, "job-error", "no redis connection provided"
    end
    -- Prefer a slave for the SCAN to keep load off the master; fall back
    -- to the master connection otherwise
    local slave, _ = create_redis_slave_connection()
    if not slave then
        job.redis_slave = job.redis
    else
        job.redis_slave = slave
    end
    -- Setup handler
    local handler = require("ledge").create_handler()
    handler.redis = job.redis
    local storage, err = require("ledge").create_storage_connection(
        job.data.storage_driver,
        job.data.storage_driver_config
    )
    if not storage then
        return nil, "redis-error", err
    end
    handler.storage = storage
    -- This runs recursively using the SCAN cursor, until the entire keyspace
    -- has been scanned.
    local res, err = _M.expire_pattern(0, job, handler)
    if slave then
        close_redis_connection(slave)
    end
    if not res then
        return nil, "redis-error", err
    end
end
-- Scans the keyspace based on a pattern (asterisk), and runs a purge for each cache entry
function _M.expire_pattern(cursor, job, handler)
    -- Keep the qless job alive during long scans
    if job:ttl() < 10 and not job:heartbeat() then
        return nil, "Failed to heartbeat job"
    end

    -- Scan using the "main" key to get a single key per cache entry
    local res, err = job.redis_slave:scan(
        cursor,
        "MATCH", job.data.repset,
        "COUNT", job.data.keyspace_scan_count
    )
    if not res or res == ngx_null then
        return nil, "SCAN error: " .. tostring(err)
    end

    for _, key in ipairs(res[2]) do
        ngx_log(ngx_DEBUG, "Purging set: ", key)
        local ok, purge_err = purge(handler, job.data.purge_mode, key)
        if ok == nil and purge_err then
            ngx_log(ngx_ERR, tostring(purge_err))
        end
    end

    local next_cursor = tonumber(res[1])
    if next_cursor == 0 then
        -- Scan complete
        return true
    end

    -- If we have a valid cursor, recurse to move on.
    return _M.expire_pattern(next_cursor, job, handler)
end
return _M
================================================
FILE: lib/ledge/jobs/revalidate.lua
================================================
local http = require "resty.http"
local http_headers = require "resty.http_headers"
local ngx_null = ngx.null
local _M = {
_VERSION = "2.3.0",
}
-- Utility to return all items in a Redis hash as a Lua table.
local function hgetall(redis, key)
    local res, err = redis:hgetall(key)
    if res and res ~= ngx_null then
        return redis:array_to_hash(res)
    end
    return nil,
        "could not retrieve " .. tostring(key) .. " data:" .. tostring(err)
end
-- Perform a (re)validation request against the origin.
--
-- Two modes:
--   * background fetch: reval_params / reval_headers are supplied in
--     job.data directly
--   * background revalidation: they are loaded from stored metadata at
--     job.data.key_chain via job.redis
-- Issues a GET with "Cache-Control: max-stale=0, stale-if-error=0" so
-- the fronting cache revalidates rather than serving stale, discards the
-- body, and returns the connection to the keepalive pool.
-- Returns nil plus a qless ("job-error", msg) tuple on failure.
function _M.perform(job)
    -- Normal background revalidation operates on stored metadata.
    -- A background fetch due to partial content from upstream however, uses the
    -- current request metadata for reval_headers / reval_params and passes it
    -- through as job data.
    local reval_params = job.data.reval_params
    local reval_headers = job.data.reval_headers
    -- If we don't have the metadata in job data, this is a background
    -- revalidation using stored metadata.
    if not reval_params and not reval_headers then
        -- Note: err is merely declared here; only key_chain and redis
        -- receive values from the right-hand side
        local key_chain, redis, err = job.data.key_chain, job.redis
        reval_params, err = hgetall(redis, key_chain.reval_params)
        if not reval_params or not next(reval_params) then
            return nil, "job-error",
                "Revalidation parameters are missing, presumed evicted. " ..
                tostring(err)
        end
        reval_headers, err = hgetall(redis, key_chain.reval_req_headers)
        if not reval_headers or not next(reval_headers) then
            return nil, "job-error",
                "Revalidation headers are missing, presumed evicted." ..
                tostring(err)
        end
    end
    -- Make outbound http request to revalidate
    local httpc = http.new()
    httpc:set_timeouts(
        reval_params.connect_timeout,
        reval_params.send_timeout,
        reval_params.read_timeout
    )
    local port = tonumber(reval_params.server_port)
    local ok, err
    if port then
        ok, err = httpc:connect(reval_params.server_addr, port)
    else
        ok, err = httpc:connect(reval_params.server_addr)
    end
    if not ok then
        return nil, "job-error",
            "could not connect to server: " .. tostring(err)
    end
    if reval_params.scheme == "https" then
        local ok, err = httpc:ssl_handshake(false, nil, false)
        if not ok then
            return nil, "job-error", "ssl handshake failed: " .. tostring(err)
        end
    end
    local headers = http_headers.new() -- Case-insensitive header table
    -- Force a proper revalidation rather than a stale response
    headers["Cache-Control"] = "max-stale=0, stale-if-error=0"
    headers["User-Agent"] =
        httpc._USER_AGENT .. " ledge_revalidate/" .. _M._VERSION
    -- Add additional headers from parent
    for k,v in pairs(reval_headers) do
        headers[k] = v
    end
    local res, err = httpc:request{
        method = "GET",
        path = reval_params.uri,
        headers = headers,
    }
    if not res then
        return nil, "job-error", "revalidate failed: " .. tostring(err)
    else
        local reader = res.body_reader
        -- Read and discard the body so the connection can be reused
        repeat
            local chunk, _ = reader()
        until not chunk
        httpc:set_keepalive(
            reval_params.keepalive_timeout,
            reval_params.keepalive_poolsize
        )
    end
end
return _M
================================================
FILE: lib/ledge/purge.lua
================================================
local pcall, tonumber, tostring, pairs =
pcall, tonumber, tostring, pairs
local tbl_insert = table.insert
local ngx_var = ngx.var
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local ngx_null = ngx.null
local ngx_time = ngx.time
local ngx_md5 = ngx.md5
local ngx_HTTP_BAD_REQUEST = ngx.HTTP_BAD_REQUEST
local str_find = string.find
local str_sub = string.sub
local str_len = string.len
local http = require("resty.http")
local cjson_encode = require("cjson").encode
local cjson_decode = require("cjson").decode
local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable
local put_background_job = require("ledge.background").put_background_job
local key_chain = require("ledge.cache_key").key_chain
local _M = {
_VERSION = "2.3.0",
}
local repset_len = -(str_len("::repset")+1)
-- Serialise a purge API response body to JSON.
-- Returns the JSON string, or nil plus the cjson error if encoding fails.
local function create_purge_response(purge_mode, result, qless_jobs)
    local payload = {
        purge_mode = purge_mode,
        result = result,
    }
    if qless_jobs then
        payload.qless_jobs = qless_jobs
    end

    local ok, json = pcall(cjson_encode, payload)
    if ok then
        return json
    end
    return nil, json
end
_M.create_purge_response = create_purge_response
-- Expires the keys in key_chain and reduces the ttl in storage
--
-- The "expires" field of the main key is set into the past (which
-- controls validity), and every key in the chain has its TTL reduced by
-- the freshness lifetime that remained, so GC timing stays consistent.
-- Fix: corrected the "count not determine" typo in the error message.
--
-- @param redis connection
-- @param storage connection
-- @param table key_chain
-- @param string entity_id (may be nil or ngx.null)
-- @return true on success; false when there is nothing to do (key missing
--         or already stale); nil plus an error message on failure
local function expire_keys(redis, storage, key_chain, entity_id)
    local ttl, err = redis:ttl(key_chain.main)
    if not ttl or ttl == ngx_null or ttl == -1 then
        return nil, "could not determine existing ttl: " .. (err or "")
    end
    if ttl == -2 then
        -- Key doesn't exist, do nothing
        return false, nil
    end
    local expires, err = redis:hget(key_chain.main, "expires")
    expires = tonumber(expires)
    if not expires or expires == ngx_null then
        return nil, "could not determine existing expiry: " .. (err or "")
    end
    local time = ngx_time()
    -- If expires is in the past then this key is stale. Nothing to do here.
    if expires <= time then
        return false, nil
    end
    local ttl_reduction = expires - time
    if ttl_reduction < 0 then ttl_reduction = 0 end
    local new_ttl = ttl - ttl_reduction
    local _, e = redis:multi()
    if e then ngx_log(ngx_ERR, e) end
    -- Set the expires field of the main key to the new time, to control
    -- its validity.
    _, e = redis:hset(key_chain.main, "expires", tostring(time - 1))
    if e then ngx_log(ngx_ERR, e) end
    -- Set new TTLs for all keys in the key chain
    for _,key in pairs(key_chain) do
        local _, e = redis:expire(key, new_ttl)
        if e then ngx_log(ngx_ERR, e) end
    end
    -- Reduce TTL on entity if there is one
    if entity_id and entity_id ~= ngx_null then
        storage:set_ttl(entity_id, new_ttl)
    end
    local ok, err = redis:exec() -- luacheck: ignore ok
    if err then
        return nil, err
    else
        return true, nil
    end
end
_M.expire_keys = expire_keys
-- Purges the cache item according to purge_mode which defaults to "invalidate".
-- If there's nothing to do we return false which results in a 404.
-- @param table handler instance
-- @param string "invalidate" | "delete" | "revalidate"
-- @param table key_chain to purge
-- @return boolean success
-- @return string message
-- @return table qless job (for revalidate only)
local function _purge(handler, purge_mode, key_chain)
    local redis = handler.redis
    local storage = handler.storage
    local exists, err = redis:exists(key_chain.main)
    if err then ngx_log(ngx_ERR, err) end
    -- We 404 if we have nothing
    if not exists or exists == ngx_null or exists == 0 then
        return false, "nothing to purge", nil
    end
    -- Delete mode overrides everything else, since you can't revalidate
    if purge_mode == "delete" then
        local res, err = handler:delete_from_cache(key_chain)
        if not res then
            return nil, err, nil
        else
            return true, "deleted", nil
        end
    end
    -- If we're revalidating, fire off the background job
    -- (false = don't overwrite revalidation data from this purge request)
    local job
    if purge_mode == "revalidate" then
        job = handler:revalidate_in_background(key_chain, false)
    end
    -- Invalidate the keys
    local ok, err = expire_keys(redis, storage, key_chain, handler:entity_id(key_chain))
    if not ok and err then
        return nil, err, job
    elseif not ok then
        return false, "already expired", job
    elseif ok then
        return true, "purged", job
    end
end
-- Derive a key chain from a full cache key by splitting on the "#"
-- separator between root key and vary key.
-- Returns nil when the full key carries no separator.
local function key_chain_from_full_key(root_key, full_key)
    local sep = str_find(full_key, "#")
    if not sep then
        return nil
    end

    -- Everything after the separator is the vary key; the vary spec
    -- itself is not needed to build the chain
    local vary_key = str_sub(full_key, sep + 1)
    return key_chain(root_key, vary_key, {})
end
-- Purges all representations of the cache item
local function purge(handler, purge_mode, repset)
    local redis = handler.redis

    local representations, err = redis:smembers(repset)
    if err then
        return nil, err
    end
    if #representations == 0 then
        return false, "nothing to purge", nil
    end

    -- Strip the "::repset" suffix to recover the root key
    local root_key = str_sub(repset, 1, repset_len)

    local overall_ok, overall_message
    local jobs = {}
    local chain
    for _, full_key in ipairs(representations) do
        chain = key_chain_from_full_key(root_key, full_key)
        local ok, message, job = _purge(handler, purge_mode, chain)
        -- Set the overall response if any representation was purged
        if overall_ok == nil or ok == true then
            overall_ok = ok
            overall_message = message
        end
        tbl_insert(jobs, job)
    end

    -- Clean up vary and repset keys if we're deleting
    if purge_mode == "delete" and overall_ok then
        local _, e = redis:del(chain.repset, chain.vary)
        if e then ngx_log(ngx_ERR, e) end
    end

    return overall_ok, overall_message, jobs
end
_M.purge = purge
-- Schedules a background purge job for the current cache key chain and
-- writes a JSON "scheduled" payload to the response body.
-- Always returns true; queueing errors are logged, not surfaced.
local function purge_in_background(handler, purge_mode)
    local key_chain = handler:cache_key_chain()
    local job, err = put_background_job(
        "ledge_purge",
        "ledge.jobs.purge",
        {
            repset = key_chain.repset,
            keyspace_scan_count = handler.config.keyspace_scan_count,
            purge_mode = purge_mode,
            storage_driver = handler.config.storage_driver,
            storage_driver_config = handler.config.storage_driver_config,
        },
        {
            -- Deterministic jid de-duplicates concurrent purges for the
            -- same root key
            jid = ngx_md5("purge:" .. tostring(key_chain.root)),
            tags = { "purge" },
            priority = 5,
        }
    )
    if err then ngx_log(ngx_ERR, err) end
    -- Create a JSON payload for the response
    -- NOTE(review): called here with 3 args but with 2 in purge_api below -
    -- confirm create_purge_response's signature accommodates both.
    local res = create_purge_response(purge_mode, "scheduled", {job})
    handler.response:set_body(res)
    return true
end
_M.purge_in_background = purge_in_background
-- Reads and JSON-decodes the current request's body.
-- Returns the decoded value on success, or nil and an error string when
-- the body is missing or is not valid JSON.
local function parse_json_req()
    ngx.req.read_body()
    local body, read_err = ngx.req.get_body_data()
    if not body then
        return nil, "Could not read request body: " .. tostring(read_err)
    end
    local ok, decoded = pcall(cjson_decode, body)
    if ok then
        return decoded
    end
    return nil, "Could not parse request body: " .. tostring(decoded)
end
-- Validates a decoded purge API request body.
-- Requires a non-empty "uris" array; "purge_mode", when present, must be
-- one of "invalidate", "revalidate" or "delete".
-- Returns true when valid, otherwise false plus a reason string.
local function validate_api_request(req)
    local uris = req["uris"]
    if not uris then
        return false, "No URIs provided"
    elseif type(uris) ~= "table" then
        return false, "Field 'uris' must be an array"
    elseif #uris == 0 then
        return false, "No URIs provided"
    end

    local mode = req["purge_mode"]
    if mode
        and mode ~= "invalidate"
        and mode ~= "revalidate"
        and mode ~= "delete"
    then
        return false, "Invalid purge_mode"
    end

    return true
end
-- Issues an internal PURGE request for the given URI via this server's own
-- address, with an X-Purge header carrying the purge mode.
-- On success returns the decoded JSON response body.
-- Returns nil plus an error string on connect/request/read failures, or
-- nil plus a { status, body, headers } table when the response isn't JSON.
local function send_purge_request(uri, purge_mode, headers)
    local uri_parts, err = http:parse_uri(uri)
    if not uri_parts then
        return nil, err
    end
    local scheme, host, port, path = unpack(uri_parts)
    -- TODO: timeouts
    -- Connect to ourselves (server_addr) rather than the URI's host; the
    -- Host header set below routes to the correct virtual host
    local httpc = http.new()
    local ok, err = httpc:connect(ngx_var.server_addr, port)
    if not ok then
        return nil, "HTTP Connect ("..ngx_var.server_addr..":"..port.."): "..err
    end
    if scheme == "https" then
        -- Verification disabled: we are talking to ourselves
        local ok, err = httpc:ssl_handshake(nil, host, false)
        if not ok then
            return nil, "SSL Handshake: "..err
        end
    end
    headers = headers or {}
    headers["Host"] = host
    headers["X-Purge"] = purge_mode
    local res, err = httpc:request({
        method = "PURGE",
        path = path,
        headers = headers
    })
    if not res then
        return nil, "HTTP Request: "..err
    end
    local body, err = res:read_body()
    if not body then
        return nil, "HTTP Response: "..err
    end
    local ok, err = httpc:set_keepalive()
    if not ok then ngx_log(ngx_ERR, err) end
    if res.headers["Content-Type"] == "application/json" then
        body = cjson_decode(body)
    else
        -- A non-JSON reply means the purge endpoint didn't handle the
        -- request; hand the raw response details back as the error value
        return nil, { status = res.status, body = body, headers = res.headers}
    end
    return body
end
-- Run the JSON PURGE API.
-- Accepts various inputs from a JSON request body, issues an internal
-- PURGE per URI, and writes the aggregated result to the response body.
-- Returns true on success or false on error (response body carries the
-- error detail; status is 400 for malformed/invalid requests).
local function purge_api(handler)
    local response = handler.response

    -- Decode the JSON request body, answering 400 on failure
    local request, err = parse_json_req()
    if not request then
        response.status = ngx_HTTP_BAD_REQUEST
        response:set_body(cjson_encode({["error"] = err}))
        return false
    end

    local ok, err = validate_api_request(request)
    if not ok then
        response.status = ngx_HTTP_BAD_REQUEST
        response:set_body(cjson_encode({["error"] = err}))
        return false
    end

    local purge_mode = request["purge_mode"] or "invalidate" -- Default to invalidating

    -- Send an internal PURGE per URI, collecting per-URI results
    local api_results = {}
    local uris = request["uris"]
    for _, uri in ipairs(uris) do
        local res, err = send_purge_request(uri, purge_mode, request["headers"])
        if not res then
            res = {["error"] = err}
        elseif type(res) == "table" then
            -- The mode is reported once at the top level of the response
            res["purge_mode"] = nil
        end
        api_results[uri] = res
    end

    local api_response, err = create_purge_response(purge_mode, api_results)
    if not api_response then
        -- Bug fix: this was `handler.set:body(...)`, which indexes the nil
        -- field `handler.set` and would itself raise. Use the response
        -- object as every other path in this function does.
        response:set_body(cjson_encode(
            {["error"] = "JSON Response Error: "..tostring(err)}
        ))
        return false
    end

    handler.response:set_body(api_response)
    return true
end
_M.purge_api = purge_api
return setmetatable(_M, fixed_field_metatable)
================================================
FILE: lib/ledge/range.lua
================================================
local setmetatable, tonumber, ipairs, type =
setmetatable, tonumber, ipairs, type
local str_match = string.match
local str_sub = string.sub
local str_randomhex = require("ledge.util").string.randomhex
local str_split = require("ledge.util").string.split
local tbl_insert = table.insert
local tbl_sort = table.sort
local tbl_remove = table.remove
local tbl_concat = table.concat
local ngx_re_match = ngx.re.match
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local get_header_token = require("ledge.header_util").get_header_token
local co_yield = coroutine.yield
local co_wrap = require("ledge.util").coroutine.wrap
local get_fixed_field_metatable_proxy =
require("ledge.util").mt.get_fixed_field_metatable_proxy
local ngx_req_get_headers = ngx.req.get_headers
local ngx_RANGE_NOT_SATISFIABLE = 416
local ngx_PARTIAL_CONTENT = 206
local _M = {
_VERSION = "2.3.0",
}
-- Creates a new range-filter instance with empty state.
function _M.new()
    local instance = {
        ranges = {},
        boundary_end = "",
        boundary = "",
    }
    return setmetatable(instance, get_fixed_field_metatable_proxy(_M))
end
-- Parses the request's Range header into a table of ranges, or nil when
-- no byte range was requested.
--
-- e.g.
-- {
--     { from = 0, to = 99 },
--     { from = 100, to = 199 },
-- }
--
-- Open-ended range specs yield nil for the missing side, since
-- tonumber("") is nil.
local function req_byte_ranges()
    local bytes = get_header_token(ngx_req_get_headers().range, "bytes")
    if not bytes then
        return nil
    end

    local ranges = str_split(bytes, ",") or { bytes }
    for i, spec in ipairs(ranges) do
        local from, to = str_match(spec, "(%d*)%-(%d*)")
        ranges[i] = { from = tonumber(from), to = tonumber(to) }
    end
    return ranges
end
_M.req_byte_ranges = req_byte_ranges
-- Comparator for table.sort, ordering byte ranges by their "from" offset.
-- Returns nil plus an error if either range lacks a "from" field.
-- Bug fix: table.sort requires a strict "less than" relation; the previous
-- `<=` returned true for equal elements, which can make table.sort raise
-- "invalid order function for sorting".
local function sort_byte_ranges(first, second)
    if not first.from or not second.from then
        return nil, "Attempt to compare invalid byteranges"
    end
    return first.from < second.from
end
-- Parses a Content-Range header value of the form
-- "bytes <from>-<to>/<size>", e.g. "bytes 0-99/1024".
-- Returns from, to, size as numbers, or nil when the value doesn't match.
-- Note: the pattern also accepts "*" for from/to, in which case tonumber()
-- yields nil for that position.
local function parse_content_range(content_range)
    local m, err = ngx_re_match(
        content_range,
        [[bytes\s+(\d+|\*)-(\d+|\*)/(\d+)]],
        "oj"
    )
    if err then ngx_log(ngx_ERR, err) end
    if not m then
        return nil
    else
        return tonumber(m[1]), tonumber(m[2]), tonumber(m[3])
    end
end
_M.parse_content_range = parse_content_range
-- Modifies the response based on range request headers.
-- Returns the response and a flag, which if true indicates a partial response
-- should be expected, if false indicates the range could not be applied, and
-- if nil indicates no range was requested.
function _M.handle_range_request(self, res)
    local range_request = req_byte_ranges()
    if range_request and type(range_request) == "table" and res.size then
        -- Don't attempt range filtering on non 200 responses
        if res.status ~= 200 then
            return res, false
        end

        -- Normalise each requested range against the response size; any
        -- single unsatisfiable range fails the whole request with a 416
        local ranges = {}
        for _, range in ipairs(range_request) do
            local range_satisfiable = true

            -- "bytes=-" specifies nothing at all
            if not range.to and not range.from then
                range_satisfiable = false
            end

            -- A missing "to" means to the "end".
            if not range.to then
                range.to = res.size - 1
            end

            -- A missing "from" means "to" is an offset from the end.
            if not range.from then
                range.from = res.size - (range.to)
                range.to = res.size - 1
                if range.from < 0 then
                    range_satisfiable = false
                end
            end

            -- A "to" greater than size should be "end"
            if range.to > (res.size - 1) then
                range.to = res.size - 1
            end

            -- Check the range is satisfiable
            if range.from > range.to then
                range_satisfiable = false
            end

            if not range_satisfiable then
                -- We'll return 416
                res.status = ngx_RANGE_NOT_SATISFIABLE
                res.body_reader = res.empty_body_reader
                res.header.content_range = "bytes */" .. res.size
                return res, false
            else
                -- We'll need the content range header value
                -- for multipart boundaries: e.g. bytes 5-10/20
                range.header = "bytes " .. range.from ..
                               "-" .. range.to ..
                               "/" .. res.size
                tbl_insert(ranges, range)
            end
        end

        local numranges = #ranges
        if numranges > 1 then
            -- Sort ranges as we cannot serve unordered.
            tbl_sort(ranges, sort_byte_ranges)

            -- Coalesce overlapping ranges.
            for i = numranges, 1, -1 do
                if i > 1 then
                    local current_range = ranges[i]
                    local previous_range = ranges[i - 1]

                    if current_range.from <= previous_range.to then
                        -- Bug fix: only ever move the end point outwards.
                        -- Previously previous_range.to was overwritten with
                        -- current_range.to unconditionally, which shrank the
                        -- merged range when the current range was nested
                        -- entirely inside the previous one.
                        if current_range.to > previous_range.to then
                            previous_range.to = current_range.to
                        end
                        previous_range.header = "bytes " ..
                            previous_range.from ..
                            "-" ..
                            previous_range.to ..
                            "/" ..
                            res.size
                        tbl_remove(ranges, i)
                    end
                end
            end
        end

        self.ranges = ranges

        if #ranges == 1 then
            -- We have a single range to serve.
            local range = ranges[1]
            local size = res.size

            res.status = ngx_PARTIAL_CONTENT
            -- NOTE(review): this branch sets ngx.header directly while the
            -- multipart branch below sets res.header - confirm whether the
            -- inconsistency is intentional.
            ngx.header["Accept-Ranges"] = "bytes"
            res.header["Content-Range"] = "bytes " .. range.from ..
                                          "-" .. range.to ..
                                          "/" .. size
            return res, true
        else
            -- Multiple ranges: serve multipart/byteranges, with a random
            -- boundary string separating the parts
            local boundary_string = str_randomhex(32)
            local boundary = {
                "",
                "--" .. boundary_string,
            }

            if res.header["Content-Type"] then
                tbl_insert(
                    boundary,
                    "Content-Type: " .. res.header["Content-Type"]
                )
            end

            self.boundary = tbl_concat(boundary, "\n")
            self.boundary_end = "\n--" .. boundary_string .. "--"

            res.status = ngx_PARTIAL_CONTENT

            -- TODO: No test coverage for these headers
            res.header["Accept-Ranges"] = "bytes"
            res.header["Content-Type"] = "multipart/byteranges; boundary=" ..
                                         boundary_string

            return res, true
        end
    end

    return res, nil
end
-- Filters the body reader, only yielding bytes specified in a range request.
-- Streams chunks while tracking an absolute "playhead" offset, yielding the
-- intersection of each chunk with each configured range, plus multipart
-- boundaries when more than one range is being served.
-- Returns the wrapped reader, or the original reader when no ranges are set.
function _M.get_range_request_filter(self, reader)
    local ranges = self.ranges
    local boundary_end = self.boundary_end
    local boundary = self.boundary
    if ranges then
        return co_wrap(function(buffer_size)
            -- Absolute (0-indexed) offset of the current chunk's first byte
            local playhead = 0
            local num_ranges = #ranges
            while true do
                local chunk, err = reader(buffer_size)
                if err then ngx_log(ngx_ERR, err) end
                if not chunk then break end
                local chunklen = #chunk
                local nextplayhead = playhead + chunklen
                for _, range in ipairs(ranges) do
                    if range.from >= nextplayhead or range.to < playhead then -- luacheck: ignore 542
                        -- Skip over non matching ranges (this is
                        -- algorithmically simpler)
                    else
                        -- Yield the multipart byterange boundary if
                        -- required and only once per range.
                        if num_ranges > 1 and not range.boundary_printed then
                            co_yield(boundary)
                            co_yield("\nContent-Range: " .. range.header)
                            co_yield("\n\n")
                            range.boundary_printed = true
                        end
                        -- Trim range to within this chunk's context
                        local yield_from = range.from
                        local yield_to = range.to
                        if range.from < playhead then
                            yield_from = playhead
                        end
                        if range.to >= nextplayhead then
                            yield_to = nextplayhead - 1
                        end
                        -- Find relative points for the range within this chunk
                        local relative_yield_from = yield_from - playhead
                        local relative_yield_to = yield_to - playhead
                        -- Ranges are all 0 indexed, finally convert to 1 based
                        -- Lua indexes, and yield the range.
                        co_yield(
                            str_sub(
                                chunk,
                                relative_yield_from + 1,
                                relative_yield_to + 1
                            )
                        )
                    end
                end
                playhead = playhead + chunklen
            end
            -- Yield the multipart byterange end marker
            if num_ranges > 1 then
                co_yield(boundary_end)
            end
        end)
    end
    return reader
end
return _M
================================================
FILE: lib/ledge/request.lua
================================================
local hdr_has_directive = require("ledge.header_util").header_has_directive
local ngx_req_get_headers = ngx.req.get_headers
local ngx_re_gsub = ngx.re.gsub
local ngx_req_get_uri_args = ngx.req.get_uri_args
local ngx_req_get_method = ngx.req.get_method
local str_byte = string.byte
local ngx_var = ngx.var
local tbl_sort = table.sort
local tbl_insert = table.insert
local _M = {
_VERSION = "2.3.0",
}
-- Determines the purge mode from the request's X-Purge header.
-- Returns "delete" or "revalidate" when so directed, defaulting to
-- "invalidate" otherwise.
local function purge_mode()
    local x_purge = ngx_req_get_headers()["X-Purge"]

    if hdr_has_directive(x_purge, "delete") then
        return "delete"
    end

    if hdr_has_directive(x_purge, "revalidate") then
        return "revalidate"
    end

    return "invalidate"
end
_M.purge_mode = purge_mode
-- Returns the request's relative URI (path plus query string), with spaces
-- encoded and encoded CRLF sequences neutralised.
local function relative_uri()
    local uri = ngx_re_gsub(ngx_var.uri, "\\s", "%20", "jo") -- encode spaces
    -- encode percentages if an encoded CRLF is in the URI
    -- see: http://resources.infosecinstitute.com/http-response-splitting-attack
    uri = ngx_re_gsub(uri, "%0D%0A", "%250D%250A", "ijo")
    return uri .. ngx_var.is_args .. (ngx_var.query_string or "")
end
_M.relative_uri = relative_uri
-- Returns the absolute URI for the current request, built from the scheme,
-- host and normalised relative URI.
local function full_uri()
    local prefix = ngx_var.scheme .. '://' .. ngx_var.host
    return prefix .. relative_uri()
end
_M.full_uri = full_uri
-- Whether the client will accept a cached response.
-- False when the request carries Pragma: no-cache or a Cache-Control
-- no-cache/no-store directive; true otherwise.
local function accepts_cache()
    local headers = ngx_req_get_headers()
    local pragma = headers["Pragma"]
    local cache_control = headers["Cache-Control"]

    if hdr_has_directive(pragma, "no-cache") then
        return false
    end
    if hdr_has_directive(cache_control, "no-cache") then
        return false
    end
    if hdr_has_directive(cache_control, "no-store") then
        return false
    end

    return true
end
_M.accepts_cache = accepts_cache
-- Comparator for table.sort: orders {key, value} pairs by key.
local function sort_args(first, second)
    return first[1] < second[1]
end
-- Returns the request's URI arguments as a sorted, re-encoded query string,
-- used to normalise args for cache keys. Returns nil when there are no args.
-- max_args caps how many arguments are parsed (default 100).
local function args_sorted(max_args)
    local args = ngx_req_get_uri_args(max_args or 100)
    if next(args) == nil then return nil end

    -- Collect {key, value} pairs and order them by key
    local pairs_list = {}
    for k, v in pairs(args) do
        tbl_insert(pairs_list, { k, v })
    end
    tbl_sort(pairs_list, sort_args)

    -- Re-encode each pair and join with "&"
    local encoded = {}
    for i, pair in ipairs(pairs_list) do
        encoded[i] = ngx.encode_args({ [pair[1]] = pair[2] })
    end
    return table.concat(encoded, "&")
end
_M.args_sorted = args_sorted
-- Used to generate a default args string for the cache key (i.e. when there
-- are no URI args present).
--
-- Returns a zero length string, unless the URI of a PURGE request ends with
-- an asterisk, in which case the asterisk is returned.
--
-- The purpose is to ensure trailing wildcards are greedy across both URI and
-- args portions of a cache key.
--
-- If you override the "args" field in a cache key spec with your own
-- function, you'll want to use this to ensure wildcard purges operate
-- correctly.
local function default_args()
    local is_purge = ngx_req_get_method() == "PURGE"
    -- 42 is the byte value of "*"
    if is_purge and str_byte(ngx_var.request_uri, -1) == 42 then
        return "*"
    end
    return ""
end
_M.default_args = default_args
return _M
================================================
FILE: lib/ledge/response.lua
================================================
local http_headers = require "resty.http_headers"
local util = require "ledge.util"
local pairs, setmetatable, tonumber, unpack =
pairs, setmetatable, tonumber, unpack
local tbl_getn = table.getn
local tbl_insert = table.insert
local tbl_concat = table.concat
local tbl_sort = table.sort
local str_lower = string.lower
local str_find = string.find
local str_sub = string.sub
local str_rep = string.rep
local str_randomhex = util.string.randomhex
local str_split = util.string.split
local ngx_null = ngx.null
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local ngx_INFO = ngx.INFO
local ngx_DEBUG = ngx.DEBUG
local ngx_re_gmatch = ngx.re.gmatch
local ngx_parse_http_time = ngx.parse_http_time
local ngx_http_time = ngx.http_time
local ngx_time = ngx.time
local ngx_re_find = ngx.re.find
local ngx_re_gsub = ngx.re.gsub
local header_has_directive = require("ledge.header_util").header_has_directive
local get_fixed_field_metatable_proxy =
require("ledge.util").mt.get_fixed_field_metatable_proxy
local save_key_chain = require("ledge.cache_key").save_key_chain
local _DEBUG = false
local _M = {
_VERSION = "2.3.0",
set_debug = function(debug) _DEBUG = debug end,
}
-- Body reader used when the response has no body; always signals EOF.
local function empty_body_reader()
    return nil
end
_M.empty_body_reader = empty_body_reader
-- Creates a new response object bound to the given handler.
-- The handler must be a non-empty table carrying a live redis connection.
-- Returns the response, or nil and an error string.
function _M.new(handler)
    if not handler or not next(handler) then
        return nil, "Handler is required"
    end
    if not handler.redis or not next(handler.redis) then
        return nil, "Handler has no redis connection"
    end

    local res = {
        redis = handler.redis,
        handler = handler, -- Provides the cache key chain
        uri = "",
        status = 0,
        header = http_headers.new(),
        -- Stored metadata
        size = 0,
        remaining_ttl = 0,
        has_esi = false,
        esi_scanned = false,
        -- Body
        entity_id = "",
        body_reader = empty_body_reader,
        body_filters = {}, -- Tracked for debug logging only
        -- Runtime metadata (not persisted)
        length = 0, -- Filled from Content-Length when present
        has_body = false, -- From lua-resty-http's has_body
    }
    return setmetatable(res, get_fixed_field_metatable_proxy(_M))
end
-- Setter for a fixed body string (not streamed).
-- Replaces the body reader with one that yields body_string exactly once
-- and then signals EOF.
function _M.set_body(self, body_string)
    local done = false
    self.body_reader = function()
        if done then
            return nil
        end
        done = true
        return body_string
    end
end
-- Wraps the current body reader with the given filter function, so that
-- filters chain: the newest filter becomes the outermost reader.
-- filter_name is only used for debug logging of the filter chain.
function _M.filter_body_reader(self, filter_name, filter)
    assert(type(filter) == "function", "filter must be a function")
    if _DEBUG then
        -- Keep track of the filters by name, just for debugging.
        -- Logs the chain as nested calls, e.g. "outer(inner(...))"
        ngx_log(ngx_DEBUG,
            filter_name,
            "(",
            tbl_concat(self.body_filters,
            "("), "" , str_rep(")", #self.body_filters - 1
            ),
            ")"
        )
        tbl_insert(self.body_filters, 1, filter_name)
    end
    self.body_reader = filter
end
-- Determines whether this response may be stored in cache.
-- Partial content, explicit no-cache/no-store/private directives, Vary: *
-- and a non-positive TTL all make the response uncacheable.
function _M.is_cacheable(self)
    -- Never cache partial content
    if self.status == 206 or self.status == 416 then
        return false
    end

    local headers = self.header
    if header_has_directive(
        headers["Cache-Control"], "(no-cache|no-store|private)", true
    ) then
        return false
    end
    if header_has_directive(headers["Pragma"], "no-cache", true) then
        return false
    end
    if headers["Vary"] == "*" then
        return false
    end

    -- Cacheable only with a positive TTL
    if self:ttl() > 0 then
        return true
    end
    return false
end
-- Calculates the TTL from response headers.
-- Header precedence is Cache-Control: s-maxage=NUM, Cache-Control: max-age=NUM
-- and finally Expires: HTTP_TIMESTRING.
-- Returns 0 when no freshness information is present.
function _M.ttl(self)
    local cc = self.header["Cache-Control"]
    if cc then
        -- Multiple Cache-Control headers are treated as one combined value
        if type(cc) == "table" then
            cc = tbl_concat(cc, ", ")
        end
        -- Collect any (s-)maxage tokens, keyed by directive name
        local max_ages = {}
        for max_age in ngx_re_gmatch(cc, [[(s-maxage|max-age)=(\d+)]], "ijo") do
            max_ages[max_age[1]] = max_age[2]
        end
        -- s-maxage wins over max-age for a shared cache
        if max_ages["s-maxage"] then
            return tonumber(max_ages["s-maxage"])
        elseif max_ages["max-age"] then
            return tonumber(max_ages["max-age"])
        end
    end
    -- Fall back to Expires.
    local expires = self.header["Expires"]
    if expires then
        -- If there are multiple, last one wins
        if type(expires) == "table" then
            expires = expires[#expires]
        end
        local time = ngx_parse_http_time(tostring(expires))
        if time then return time - ngx_time() end
    end
    return 0
end
-- True once the stored response's remaining TTL has run out.
function _M.has_expired(self)
    if self.remaining_ttl > 0 then
        return false
    end
    return true
end
-- Reads the cache entry for the current key chain into this response:
-- main metadata, headers, then consistency checks on entities and repset.
--
-- Return nil and an error on an actual Redis error, this indicates that Redis
-- has failed and we aren't going to be able to proceed normally.
-- Return nil and *no* error if this is just a broken/partial cache entry
-- so we MISS and update the entry.
-- Returns true when a complete, consistent entry was loaded.
function _M.read(self)
    local key_chain, err = self.handler:cache_key_chain()
    if not key_chain then
        return nil, err
    end
    local redis = self.redis
    -- Read main metdata
    local cache_parts, err = redis:hgetall(key_chain.main)
    if not cache_parts or cache_parts == ngx_null then
        return nil, err
    end
    -- No cache entry for this key
    local cache_parts_len = #cache_parts
    if not cache_parts_len or cache_parts_len == 0 then
        return nil
    end
    local time_in_cache = 0
    local time_since_generated = 0
    -- The Redis replies is a sequence of messages, so we iterate over pairs
    -- to get hash key/values.
    for i = 1, cache_parts_len, 2 do
        if cache_parts[i] == "uri" then
            self.uri = cache_parts[i + 1]
        elseif cache_parts[i] == "status" then
            self.status = tonumber(cache_parts[i + 1])
        elseif cache_parts[i] == "entity" then
            self.entity_id = cache_parts[i + 1]
        elseif cache_parts[i] == "expires" then
            -- Stored as an absolute expiry timestamp; convert to a TTL
            self.remaining_ttl = tonumber(cache_parts[i + 1]) - ngx_time()
        elseif cache_parts[i] == "saved_ts" then
            time_in_cache = ngx_time() - tonumber(cache_parts[i + 1])
        elseif cache_parts[i] == "generated_ts" then
            time_since_generated = ngx_time() - tonumber(cache_parts[i + 1])
        elseif cache_parts[i] == "has_esi" then
            -- NOTE(review): assigned as the raw stored string (truthy either
            -- way), unlike esi_scanned below which becomes a boolean -
            -- confirm downstream expectations.
            self.has_esi = cache_parts[i + 1]
        elseif cache_parts[i] == "esi_scanned" then
            -- Convert the stored string back to a boolean
            local scanned = cache_parts[i + 1]
            if scanned == "false" then
                self.esi_scanned = false
            else
                self.esi_scanned = true
            end
        elseif cache_parts[i] == "size" then
            self.size = tonumber(cache_parts[i + 1])
        end
    end
    -- Read headers
    local headers, err = redis:hgetall(key_chain.headers)
    if not headers or headers == ngx_null then
        return nil, err
    end
    local headers_len = tbl_getn(headers)
    if headers_len == 0 then
        ngx_log(ngx_INFO, "headers missing")
        return nil
    end
    -- Header fields stored as "N:Name" carry multiple values for Name
    for i = 1, headers_len, 2 do
        local header = headers[i]
        if str_find(header, ":") then
            -- We have multiple headers with the same field name
            local _, key = unpack(str_split(header, ":"))
            if not self.header[key] then
                self.header[key] = {}
            end
            tbl_insert(self.header[key], headers[i + 1])
        else
            self.header[header] = headers[i + 1]
        end
    end
    -- Calculate the Age header
    if self.header["Age"] then
        -- We have end-to-end Age headers, add our time_in_cache.
        self.header["Age"] = tonumber(self.header["Age"]) + time_in_cache
    elseif self.header["Date"] then
        -- We have no advertised Age, use the generated timestamp.
        self.header["Age"] = time_since_generated
    end
    -- "touch" other keys not needed for read, so that they are
    -- less likely to be unfairly evicted ahead of time
    -- Note: From Redis 3.2.1 this could be one TOUCH command
    local _ = redis:hlen(key_chain.reval_params)
    local _ = redis:hlen(key_chain.reval_req_headers)
    -- A body is expected: verify the entities set survives, else MISS
    if self.size > 0 then
        local entities, err = redis:scard(key_chain.entities)
        if not entities or entities == ngx_null then
            return nil, "could not read entities set: " .. err
        elseif entities == 0 then
            ngx_log(ngx_INFO, "entities set is empty")
            return nil
        end
    end
    -- Check this key is in the repset
    local scard, err = redis:sismember(key_chain.repset, key_chain.full)
    if err then
        return nil, err
    end
    -- Got a cache entry but missing from repset or repset missing, bad...
    -- Call save_key_chain which will add this rep to the repset
    if scard == 0 then
        local repset_ttl = redis:ttl(key_chain.main)
        local ok, err = save_key_chain(redis, key_chain, repset_ttl)
        if not ok then
            return nil, err
        end
    end
    return true
end
-- Takes headers from a HTTP response and returns a flat
-- { name1, value1, name2, value2, ... } list of cacheable header entries
-- formatted for Redis. Headers named via
-- Cache-Control: (no-cache|private)="field" are excluded, and multi-valued
-- headers are emitted with "1:Name", "2:Name", ... keys.
local function prepare_cacheable_headers(headers)
    -- Collect header names marked uncacheable by Cache-Control directives
    local excluded = {}
    local cc = headers["Cache-Control"]
    if cc then
        if type(cc) == "table" then cc = tbl_concat(cc, ", ") end
        cc = str_lower(cc)

        if str_find(cc, "=", 1, true) then
            local pattern = '(?:no-cache|private)="?([0-9a-z-]+)"?'
            local re_ctx = {}
            local from, to, err
            repeat
                -- nth=1: from/to bound the captured header name
                from, to, err = ngx_re_find(cc, pattern, "jo", re_ctx, 1)
                if from then
                    excluded[str_sub(cc, from, to)] = true
                elseif err then
                    ngx_log(ngx_ERR, err)
                end
            until not from
        end
    end

    -- Flatten the remaining headers into alternating name/value entries
    local flat = {}
    for name, value in pairs(headers) do
        if not excluded[str_lower(name)] then
            if type(value) == 'table' then
                -- Multiple values: prefix each with its 1-based position
                for i = 1, #value do
                    tbl_insert(flat, i..':'..name)
                    tbl_insert(flat, value[i])
                end
            else
                tbl_insert(flat, name)
                tbl_insert(flat, value)
            end
        end
    end
    return flat
end
-- Saves this response's metadata and headers to Redis under the current
-- cache key chain, minting a fresh entity id. Keys expire after
-- ttl + keep_cache_for seconds (keep_cache_for defaults to 0).
-- The body itself is persisted separately by the body writer.
-- Individual Redis errors are logged but do not abort the save.
function _M.save(self, keep_cache_for)
    if not keep_cache_for then keep_cache_for = 0 end
    -- Create a new entity id
    self.entity_id = str_randomhex(32)
    local ttl = self:ttl()
    local time = ngx_time()
    local redis = self.redis
    if not next(redis) then return nil, "no redis" end
    local key_chain = self.handler:cache_key_chain()
    -- A Date header is needed to derive generated_ts below
    if not self.header["Date"] then
        self.header["Date"] = ngx_http_time(ngx_time())
    end
    -- Replace the main metadata hash wholesale
    local ok, err = redis:del(key_chain.main)
    if not ok then ngx_log(ngx_ERR, err) end
    local ok, err = redis:hmset(key_chain.main,
        "entity", self.entity_id,
        "status", self.status,
        "uri", self.uri,
        "expires", ttl + time,
        "generated_ts", ngx_parse_http_time(self.header["Date"]),
        "saved_ts", time,
        "esi_scanned", tostring(self.esi_scanned) -- from bool
    )
    if not ok then ngx_log(ngx_ERR, err) end
    local h = prepare_cacheable_headers(self.header)
    ok, err = redis:del(key_chain.headers)
    if not ok then ngx_log(ngx_ERR, err) end
    ok, err = redis:hmset(key_chain.headers, unpack(h))
    if not ok then ngx_log(ngx_ERR, err) end
    -- Mark the keys as eventually volatile (the body is set by the body writer)
    local expiry = ttl + tonumber(keep_cache_for)
    ok, err = redis:expire(key_chain.main, expiry)
    if not ok then ngx_log(ngx_ERR, err) end
    ok, err = redis:expire(key_chain.headers, expiry)
    if not ok then ngx_log(ngx_ERR, err) end
    -- Track the new entity in the entities set
    local ok, err = redis:sadd(key_chain.entities, self.entity_id)
    if not ok then
        ngx_log(ngx_ERR, "error adding entity to set: ", err)
    end
    ok, err = redis:expire(key_chain.entities, expiry)
    if not ok then ngx_log(ngx_ERR, err) end
    return true
end
-- Writes a single field to the cache entry's main hash and mirrors it on
-- this response object. Returns the Redis result, or nil and an error.
function _M.set_and_save(self, field, value)
    local main_key = self.handler:cache_key_chain().main
    local ok, err = self.redis:hset(main_key, field, tostring(value))
    if not ok then
        if err then ngx_log(ngx_ERR, err) end
        return nil, err
    end

    self[field] = value
    return ok
end
-- Known Warning header codes (RFC 7234, section 5.5) and their texts.
local WARNINGS = {
    ["110"] = "Response is stale",
    ["214"] = "Transformation applied",
    ["112"] = "Disconnected Operation",
}
-- Appends a Warning response header of the form '<code> <name> "<text>"'.
-- code is a string key into WARNINGS above.
-- Bug fix: WARNINGS[code] was concatenated unconditionally, raising
-- "attempt to concatenate a nil value" for unknown codes; unknown codes
-- are now appended without a warning text instead.
function _M.add_warning(self, code, name)
    if not self.header["Warning"] then
        self.header["Warning"] = {}
    end
    local header = code .. ' ' .. name
    local text = WARNINGS[code]
    if text then
        header = header .. ' "' .. text .. '"'
    end
    tbl_insert(self.header["Warning"], header)
end
-- Returns a copy of the given list with duplicate values removed,
-- preserving first-seen order. (Also avoids the original parameter name
-- "table", which shadowed the global table library.)
local function deduplicate_table(list)
    -- Nothing to deduplicate with one entry or fewer
    if #list <= 1 then
        return list
    end

    local seen = {}
    local result = {}
    local count = 0
    for _, value in ipairs(list) do
        if not seen[value] then
            seen[value] = true
            count = count + 1
            result[count] = value
        end
    end
    return result
end
-- Parse and normalise this response's Vary header into a sorted,
-- de-duplicated list of lowercased field names.
-- Returns nil when there is no (non-empty) Vary header.
function _M.parse_vary_header(self)
local vary_hdr = self.header["Vary"]
local vary_spec
if vary_hdr and vary_hdr ~= "" then
-- Multiple Vary headers arrive as a table; fold into a single CSV string
if type(vary_hdr) == "table" then
vary_hdr = tbl_concat(vary_hdr,",")
end
-- Remove whitespace around commas and lowercase
vary_hdr = ngx_re_gsub(str_lower(vary_hdr), [[\s*,\s*]], ",", "oj")
vary_spec = str_split(vary_hdr, ",")
-- Sort + dedupe so equivalent Vary headers yield an identical spec
tbl_sort(vary_spec)
vary_spec = deduplicate_table(vary_spec)
end
-- Return the normalised vary spec table (nil when no Vary header present)
return vary_spec
end
return _M
================================================
FILE: lib/ledge/stale.lua
================================================
local math_min = math.min
local ngx_req_get_headers = ngx.req.get_headers
local header_has_directive = require("ledge.header_util").header_has_directive
local get_numeric_header_token =
require("ledge.header_util").get_numeric_header_token
local _M = {
_VERSION = "2.3.0"
}
-- True if the request specifically asks for stale (req.cc.max-stale) and the
-- response doesn't explicitly forbid this res.cc.(must|proxy)-revalidate.
-- True if the request specifically asks for stale (req.cc.max-stale) and the
-- response doesn't explicitly forbid this via res.cc.(must|proxy)-revalidate.
local function can_serve_stale(res)
    local req_cc = ngx_req_get_headers()["Cache-Control"]
    local max_stale = get_numeric_header_token(req_cc, "max-stale")

    -- No max-stale token means the client hasn't opted in to stale content
    if not max_stale then
        return false
    end

    -- The response can veto stale serving outright
    local res_cc = res.header["Cache-Control"]
    if header_has_directive(res_cc, "(must|proxy)-revalidate") then
        return false
    end

    -- remaining_ttl goes negative once expired; serve only while the stale
    -- window the client asked for still covers the deficit.
    return (-max_stale) <= res.remaining_ttl
end
_M.can_serve_stale = can_serve_stale
-- Returns true if stale-while-revalidate or stale-if-error is specified, valid
-- and not constrained by other factors such as max-stale.
-- @param token "stale-while-revalidate" | "stale-if-error"
-- Validate that serving stale under the given Cache-Control extension token
-- is permitted by both the request and the response.
-- @param res the cached response (reads res.header, res.remaining_ttl via Age)
-- @param token "stale-while-revalidate" | "stale-if-error"
-- @return boolean
local function verify_stale_conditions(res, token)
assert(token == "stale-while-revalidate" or token == "stale-if-error",
"unknown token: " .. tostring(token))
local res_cc = res.header["Cache-Control"]
local res_cc_stale = get_numeric_header_token(res_cc, token)
-- Check the response permits this at all
if header_has_directive(res_cc, "(must|proxy)-revalidate") then
return false
end
-- Get request header tokens
local req_cc = ngx_req_get_headers()["Cache-Control"]
local req_cc_stale = get_numeric_header_token(req_cc, token)
local req_cc_max_age = get_numeric_header_token(req_cc, "max-age")
local req_cc_max_stale = get_numeric_header_token(req_cc, "max-stale")
local stale_ttl = 0
-- If both the request and response carry the token, use the lower value
if req_cc_stale and res_cc_stale then
stale_ttl = math_min(req_cc_stale, res_cc_stale)
-- Otherwise use whichever side specified it
elseif req_cc_stale then
stale_ttl = req_cc_stale
elseif res_cc_stale then
stale_ttl = res_cc_stale
end
if stale_ttl <= 0 then
return false -- No stale policy defined
elseif header_has_directive(req_cc, "min-fresh") then
return false -- Cannot serve stale as request demands freshness
elseif req_cc_max_age and
req_cc_max_age < (tonumber(res.header["Age"] or 0) or 0) then
return false -- Cannot serve stale as req max-age is less than res Age
elseif req_cc_max_stale and req_cc_max_stale < stale_ttl then
return false -- Cannot serve stale as req max-stale is less than S-W-R
else
-- We can return stale
return true
end
end
_M.verify_stale_conditions = verify_stale_conditions
-- Convenience wrapper over verify_stale_conditions() for the
-- stale-while-revalidate directive.
_M.can_serve_stale_while_revalidate = function(res)
    return verify_stale_conditions(res, "stale-while-revalidate")
end
-- Convenience wrapper over verify_stale_conditions() for the
-- stale-if-error directive.
_M.can_serve_stale_if_error = function(res)
    return verify_stale_conditions(res, "stale-if-error")
end
return _M
================================================
FILE: lib/ledge/state_machine/actions.lua
================================================
local type, next = type, next
local esi = require("ledge.esi")
local response = require("ledge.response")
local ngx_var = ngx.var
local ngx_HTTP_NOT_MODIFIED = ngx.HTTP_NOT_MODIFIED
local ngx_req_set_header = ngx.req.set_header
local get_gzip_decoder = require("ledge.gzip").get_gzip_decoder
local _M = { -- luacheck: no unused
_VERSION = "2.3.0",
}
-- Actions. Functions which can be called on transition.
return {
-- Return the Redis connection to the keepalive pool (or close it).
redis_close = function(handler)
return require("ledge").close_redis_connection(handler.redis)
end,
-- Release the upstream HTTP connection into the keepalive pool, when the
-- client object supports it.
httpc_close = function(handler)
local upstream_client = handler.upstream_client
if next(upstream_client) then
if type(upstream_client.set_keepalive) == "function" then
local config = handler.config
return upstream_client:set_keepalive(
config.upstream_keepalive_timeout,
config.upstream_keepalive_poolsize
)
end
end
end,
-- Hard-close the upstream connection (e.g. on client abort) so it is not
-- reused from the pool.
httpc_close_without_keepalive = function(handler)
local upstream_client = handler.upstream_client
if next(upstream_client) then
return upstream_client:close()
end
end,
-- Keep a reference to the current (error) response so it can be restored
-- later if stale serving turns out not to be possible.
stash_error_response = function(handler)
handler.error_response = handler.response
end,
-- Restore a previously stashed error response (if any) as the current one.
restore_error_response = function(handler)
local error_res = handler.error_response
if next(error_res) then
handler.response = error_res
end
end,
-- If ESI is enabled and we have an esi_args prefix, weed uri args
-- beginning with the prefix (known as ESI_ARGS) out of the URI (and thus
-- cache key) and stash them in the custom ESI variables table.
filter_esi_args = function(handler)
if handler.config.esi_enabled then
esi.filter_esi_args(handler)
end
end,
-- Load the stored response (if any) from cache into the handler.
read_cache = function(handler)
handler.response = handler:read_from_cache()
end,
-- Replace the body reader with one that yields nothing (HEAD / 304).
install_no_body_reader = function(handler)
local res = handler.response
res.body_reader = res.empty_body_reader
end,
-- Chain a gunzip filter onto the body reader and drop the now-inaccurate
-- Content-Encoding header.
install_gzip_decoder = function(handler)
local res = handler.response
res.header["Content-Encoding"] = nil
res:filter_body_reader(
"gzip_decoder",
get_gzip_decoder(res.body_reader)
)
end,
-- Chain the range filter so only the requested byte range is served.
install_range_filter = function(handler)
local res = handler.response
local range = handler.range
res:filter_body_reader(
"range_request_filter",
range:get_range_request_filter(res.body_reader)
)
end,
-- Mark that the response body will be scanned for ESI instructions.
set_esi_scan_enabled = function(handler)
handler.esi_scan_enabled = true
--handler.esi_scan_disabled = false
handler.response.esi_scanned = true
end,
-- Chain the ESI scan filter, provided a processor has been selected.
install_esi_scan_filter = function(handler)
local res = handler.response
local esi_processor = handler.esi_processor
if next(esi_processor) then
res:filter_body_reader(
"esi_scan_filter",
esi_processor:get_scan_filter(res)
)
end
end,
-- Mark that no ESI scanning will take place for this response.
set_esi_scan_disabled = function(handler)
local res = handler.response
handler.esi_scan_enabled = false
res.esi_scanned = false
end,
-- Chain the ESI process filter, provided a processor has been selected.
install_esi_process_filter = function(handler)
local res = handler.response
local esi_processor = handler.esi_processor
if next(esi_processor) then
res:filter_body_reader(
"esi_process_filter",
esi_processor:get_process_filter(res)
)
end
end,
set_esi_process_enabled = function(handler)
handler.esi_process_enabled = true
end,
set_esi_process_disabled = function(handler)
handler.esi_process_enabled = false
end,
-- Prevent downstream caches from storing this (ESI-processed) response.
zero_downstream_lifetime = function(handler)
local res = handler.response
if res.header then
res.header["Cache-Control"] = "private, max-age=0"
end
end,
-- Surrogate-Control is for surrogates only; strip it before serving.
remove_surrogate_control_header = function(handler)
local res = handler.response
if res.header then
res.header["Surrogate-Control"] = nil
end
end,
-- Fetch from the origin. A 304 means our stored response is still valid,
-- so keep the current response rather than replacing it.
fetch = function(handler)
local res = handler:fetch_from_origin()
if res.status ~= ngx_HTTP_NOT_MODIFIED then
handler.response = res
end
end,
remove_client_validators = function(handler)
-- Keep these in case we need to restore them (after revalidating upstream)
local client_validators = handler.client_validators
client_validators["If-Modified-Since"] = ngx_var.http_if_modified_since
client_validators["If-None-Match"] = ngx_var.http_if_none_match
ngx_req_set_header("If-Modified-Since", nil)
ngx_req_set_header("If-None-Match", nil)
end,
-- Put back the validators stashed by remove_client_validators.
restore_client_validators = function(handler)
local client_validators = handler.client_validators
ngx_req_set_header("If-Modified-Since", client_validators["If-Modified-Since"])
ngx_req_set_header("If-None-Match", client_validators["If-None-Match"])
end,
-- Warning: 110 "Response is stale"
add_stale_warning = function(handler)
return handler:add_warning("110")
end,
-- Warning: 112 "Disconnected Operation"
add_disconnected_warning = function(handler)
return handler:add_warning("112")
end,
-- Replace the response with a fresh one for JSON output (purge API).
set_json_response = function(handler)
local res = response.new(handler)
res.header["Content-Type"] = "application/json"
handler.response = res
end,
-- Updates the revalidation_params key with data from the current request,
-- and schedules a background revalidation job
revalidate_in_background = function(handler)
return handler:revalidate_in_background(handler:cache_key_chain(), true)
end,
-- Triggered on upstream partial content, assumes no stored
-- revalidation metadata but since we have a request context (which isn't
-- the case with `revalidate_in_background`) we can simply fetch.
fetch_in_background = function(handler)
return handler:fetch_in_background()
end,
save_to_cache = function(handler)
local res = handler.response
return handler:save_to_cache(res)
end,
delete_from_cache = function(handler)
return handler:delete_from_cache(handler:cache_key_chain())
end,
-- Stop buffering the body downstream (e.g. background work / 304s).
disable_output_buffers = function(handler)
handler.output_buffers_enabled = false
end,
-- Recompute the cache key (e.g. after a Vary change).
reset_cache_key = function(handler)
handler:reset_cache_key()
end,
-- The remaining actions simply set the downstream HTTP status.
set_http_ok = function()
ngx.status = ngx.HTTP_OK
end,
set_http_not_found = function()
ngx.status = ngx.HTTP_NOT_FOUND
end,
set_http_not_modified = function()
ngx.status = ngx_HTTP_NOT_MODIFIED
end,
set_http_service_unavailable = function()
ngx.status = ngx.HTTP_SERVICE_UNAVAILABLE
end,
set_http_gateway_timeout = function()
ngx.status = ngx.HTTP_GATEWAY_TIMEOUT
end,
set_http_internal_server_error = function()
ngx.status = ngx.HTTP_INTERNAL_SERVER_ERROR
end,
-- Mirror the response's status downstream, defaulting to 500 when absent.
set_http_status_from_response = function(handler)
local res = handler.response
if res and res.status then
ngx.status = res.status
else
ngx.status = ngx.HTTP_INTERNAL_SERVER_ERROR
end
end,
}
================================================
FILE: lib/ledge/state_machine/events.lua
================================================
local _M = { -- luacheck: no unused
_VERSION = "2.3.0",
}
-- Event transition table.
--
-- Use "begin" to transition based on an event. Filter transitions by current
-- state "when", and/or any previous state "after", and/or a previously fired
-- event "in_case", and run actions using "but_first". Transitions are processed
-- in the order found, so place more specific entries for a given event before
-- more generic ones.
-- Event transition table (see module header comment): transitions for a
-- given event are evaluated in order, so more specific entries (filtered by
-- "when"/"after"/"in_case") must precede generic fallbacks.
return {
    -- Initial transition (entry point). Connect to redis.
    init = {
        { begin = "checking_method", but_first = "filter_esi_args" },
    },
    cacheable_method = {
        { when = "checking_origin_mode", begin = "checking_request" },
        { begin = "checking_origin_mode" },
    },
    -- PURGE method detected.
    purge_requested = {
        {
            when = "considering_wildcard_purge",
            begin = "purging",
            but_first = "set_json_response"
        },
        {
            when = "considering_purge_api",
            begin = "considering_wildcard_purge"
        },
        { begin = "considering_purge_api" },
    },
    purge_api_requested = {
        {
            begin = "purging_via_api",
            but_first = "set_json_response"
        },
    },
    wildcard_purge_requested = {
        { begin = "wildcard_purging", but_first = "set_json_response" },
    },
    -- Successfully purged (expired) a cache entry. Exit 200 OK.
    purged = {
        { begin = "serving", but_first = "set_http_ok" },
    },
    wildcard_purge_scheduled = {
        { begin = "serving", but_first = "set_http_ok" },
    },
    purge_api_completed = {
        { begin = "serving", but_first = "set_http_ok" },
    },
    purge_api_failed = {
        { begin = "serving", but_first = "set_http_status_from_response" },
    },
    -- URI to purge was not found. Exit 404 Not Found.
    nothing_to_purge = {
        { begin = "serving", but_first = "set_http_not_found" },
    },
    -- The request accepts cache. If we've already validated locally, we can
    -- think about serving. Otherwise we need to check the cache situation.
    cache_accepted = {
        { when = "revalidating_locally", begin = "considering_esi_process" },
        { begin = "checking_cache" },
    },
    forced_cache = {
        { begin = "accept_cache" },
    },
    -- This request doesn't accept cache, so we need to see about fetching
    cache_not_accepted = {
        { begin = "checking_can_fetch" },
    },
    -- We don't know anything about this URI, so we've got to see about fetching
    cache_missing = {
        { begin = "checking_can_fetch" },
    },
    -- This URI was cacheable last time, but has expired. So see about serving
    -- stale, but failing that, see about fetching.
    -- (A duplicated "checking_can_serve_stale" entry was removed here; the
    -- second copy was unreachable as transitions match in order.)
    cache_expired = {
        { when = "checking_cache", begin = "checking_can_serve_stale" },
        { when = "checking_can_serve_stale", begin = "checking_can_fetch" },
    },
    -- We have a (not expired) cache entry. Lets try and validate in case we can
    -- exit 304.
    cache_valid = {
        { in_case = "forced_cache", begin = "considering_esi_process" },
        {
            in_case = "collapsed_response_ready",
            begin = "considering_local_revalidation"
        },
        { when = "checking_cache", begin = "considering_revalidation" },
    },
    -- We need to fetch, and there are no settings telling us we shouldn't, but
    -- collapsed forwarding is on, so if cache is accepted and in an "expired"
    -- state (i.e. not missing), lets try to collapse. Otherwise we just start
    -- fetching.
    can_fetch_but_try_collapse = {
        { in_case = "cache_missing", begin = "fetching" },
        { in_case = "cache_accepted", begin = "requesting_collapse_lock" },
        { begin = "fetching" },
    },
    -- We have the lock on this "fetch". We might be the only one. We'll never
    -- know. But we fetch as "surrogate" in case others are listening.
    obtained_collapsed_forwarding_lock = {
        { begin = "fetching_as_surrogate" },
    },
    -- Another request is currently fetching, so we've subscribed to updates on
    -- this URI. We need to block until we hear something (or timeout).
    subscribed_to_collapsed_forwarding_channel = {
        { begin = "waiting_on_collapsed_forwarding_channel" },
    },
    -- Another request was fetching when we asked, but by the time we subscribed
    -- the channel was closed (small window, but potentially possible). Chances
    -- are the item is now in cache, so start there.
    collapsed_forwarding_channel_closed = {
        { begin = "checking_cache" },
    },
    -- We were waiting on a collapse channel, and got a message saying the
    -- response is now ready. The item will now be fresh in cache.
    collapsed_response_ready = {
        { begin = "checking_cache" },
    },
    -- We were waiting on another request (collapsed), but it came back as a
    -- non-cacheable response (i.e. the previously cached item is no longer
    -- cacheable). So go fetch for ourselves.
    collapsed_forwarding_failed = {
        { begin = "fetching" },
    },
    -- We were waiting on another request, but it received an upstream_error
    -- (e.g. 500) Check if we can serve stale content instead
    collapsed_forwarding_upstream_error = {
        { begin = "considering_stale_error" },
    },
    -- We were waiting on another request, but the vary key changed
    -- Might still match so check the cache again
    collapsed_forwarding_vary_modified = {
        { begin = "checking_cache", but_first = "reset_cache_key" },
    },
    -- We need to fetch and nothing is telling us we shouldn't.
    -- Collapsed forwarding is not enabled.
    can_fetch = {
        { begin = "fetching" },
    },
    -- We've fetched and got a response status and headers. We should consider
    -- potential for ESI before doing anything else.
    response_fetched = {
        { in_case = "vary_modified", begin = "considering_esi_scan" },
        { begin = "considering_vary" },
    },
    vary_modified = {
        { begin = "considering_esi_scan" },
    },
    vary_unmodified = {
        { begin = "considering_esi_scan" }
    },
    partial_response_fetched = {
        { begin = "considering_background_fetch" },
    },
    -- We had a partial response and were able to schedule a background fetch
    -- for the complete resource.
    can_fetch_in_background = {
        {
            in_case = "partial_response_fetched",
            begin = "considering_esi_scan",
            but_first = "fetch_in_background"
        },
    },
    -- We had a partial response but skipped background fetching the complete
    -- resource, most likely because it is bigger than cache_max_memory.
    background_fetch_skipped = {
        {
            in_case = "partial_response_fetched",
            begin = "considering_esi_scan"
        },
    },
    -- If we went upstream and errored, check if we can serve a cached copy
    -- (stale-if-error), publish the error first if we were the surrogate
    -- request
    upstream_error = {
        {
            after = "fetching_as_surrogate",
            begin = "publishing_collapse_upstream_error"
        },
        { in_case = "cache_not_accepted", begin = "serving_upstream_error" },
        { in_case = "cache_missing", begin = "serving_upstream_error" },
        { begin = "considering_stale_error" }
    },
    -- We had an error from upstream and could not serve stale content, so serve
    -- the error.
    -- Or we were collapsed and the surrogate received an error but we could not
    -- serve stale in that case, try and fetch ourselves
    can_serve_upstream_error = {
        { after = "fetching", begin = "serving_upstream_error" },
        { in_case = "collapsed_forwarding_upstream_error", begin = "fetching" },
        { begin = "serving_upstream_error" },
    },
    -- We've determined we need to scan the body for ESI.
    esi_scan_enabled = {
        {
            begin = "considering_gzip_inflate",
            but_first = "set_esi_scan_enabled"
        },
    },
    -- We've determined no need to scan the body for ESI.
    esi_scan_disabled = {
        { begin = "updating_cache", but_first = "set_esi_scan_disabled" },
    },
    gzip_inflate_enabled = {
        {
            after = "updating_cache",
            begin = "preparing_response",
            but_first = "install_gzip_decoder"
        },
        {
            in_case = "esi_scan_enabled",
            begin = "updating_cache",
            but_first = { "install_gzip_decoder", "install_esi_scan_filter" }
        },
        { begin = "preparing_response", but_first = "install_gzip_decoder" },
    },
    gzip_inflate_disabled = {
        { after = "updating_cache", begin = "preparing_response" },
        {
            after = "considering_esi_scan",
            in_case = "esi_scan_enabled",
            begin = "updating_cache",
            but_first = { "install_esi_scan_filter" }
        },
        { in_case = "esi_process_disabled", begin = "checking_range_request" },
        { begin = "preparing_response" },
    },
    range_accepted = {
        { begin = "preparing_response", but_first = "install_range_filter" },
    },
    range_not_accepted = {
        { begin = "preparing_response" },
    },
    range_not_requested = {
        { begin = "preparing_response" },
    },
    -- We deduced that the new response can be cached. We always
    -- "save_to_cache". If we were fetching as a surrogate (collapsing) make
    -- sure we tell any others concerned. If we were performing a background
    -- revalidate (having served stale), we can just exit. Otherwise go back
    -- through validating in case we can 304 to the client.
    response_cacheable = {
        {
            after = "fetching_as_surrogate",
            in_case = "vary_modified",
            begin = "publishing_collapse_vary_modified",
            but_first = "save_to_cache"
        },
        {
            after = "fetching_as_surrogate",
            begin = "publishing_collapse_success",
            but_first = "save_to_cache"
        },
        {
            begin = "considering_local_revalidation",
            but_first = "save_to_cache"
        },
    },
    -- We've deduced that the new response cannot be cached. Essentially this is
    -- as per "response_cacheable", except we "delete" rather than "save", and
    -- we don't try to revalidate.
    response_not_cacheable = {
        {
            after = "fetching_as_surrogate",
            begin = "publishing_collapse_failure",
            but_first = "delete_from_cache"
        },
        { begin = "considering_esi_process", but_first = "delete_from_cache" },
    },
    -- A missing response body means a HEAD request or a 304 Not Modified
    -- upstream response, for example. If we were revalidating upstream, we can
    -- now re-revalidate against local cache. If we're collapsing or background
    -- revalidating, ensure we either clean up the collapsees or exit
    -- respectively.
    response_body_missing = {
        {
            in_case = "must_revalidate",
            begin = "considering_local_revalidation"
        },
        {
            after = "fetching_as_surrogate",
            begin = "publishing_collapse_failure",
            but_first = "delete_from_cache"
        },
        {
            begin = "serving",
            but_first = {
                "install_no_body_reader", "set_http_status_from_response"
            },
        },
    },
    -- We were the collapser, so digressed into being a surrogate. We're done
    -- now and have published this fact, so we pick up where it would have left
    -- off - attempting to 304 to the client. Unless we received an error, in
    -- which case check if we can serve stale instead.
    published = {
        { in_case = "upstream_error", begin = "considering_stale_error" },
        { begin = "considering_local_revalidation" },
    },
    -- Client requests a max-age of 0 or stored response requires revalidation.
    must_revalidate = {
        { begin = "checking_can_fetch" },
    },
    -- We can validate locally, so do it. This doesn't imply it's valid, merely
    -- that we have the correct parameters to attempt validation.
    can_revalidate_locally = {
        { begin = "revalidating_locally" },
    },
    -- Standard non-conditional request.
    no_validator_present = {
        { begin = "considering_esi_process" },
    },
    -- The response has not been modified against the validators given. We'll
    -- exit 304 if we can but go via considering_esi_process in case of ESI work
    -- to be done.
    not_modified = {
        { when = "revalidating_locally", begin = "considering_esi_process" },
    },
    -- Our cache has been modified as compared to the validators. But cache is
    -- valid, so just serve it. If we've been upstream, re-compare against
    -- client validators.
    modified = {
        { when = "revalidating_locally", begin = "considering_esi_process" },
    },
    esi_process_enabled = {
        {
            in_case = "can_serve_stale",
            begin = "serving_stale",
            but_first = {
                "install_esi_process_filter",
                "set_esi_process_enabled",
                "zero_downstream_lifetime",
                "remove_surrogate_control_header"
            }
        },
        {
            begin = "preparing_response",
            but_first = {
                "install_esi_process_filter",
                "set_esi_process_enabled",
                "zero_downstream_lifetime",
                "remove_surrogate_control_header"
            }
        },
    },
    esi_process_disabled = {
        {
            begin = "considering_gzip_inflate",
            but_first = "set_esi_process_disabled"
        },
    },
    esi_process_not_required = {
        {
            begin = "considering_gzip_inflate",
            but_first = {
                "set_esi_process_disabled",
                "remove_surrogate_control_header"
            },
        },
    },
    can_serve_disconnected = {
        {
            begin = "considering_esi_process",
            but_first = "add_disconnected_warning"
        },
    },
    -- We've deduced we can serve a stale version of this URI. Ensure we add a
    -- warning to the response headers.
    can_serve_stale = {
        {
            after = "considering_stale_error",
            begin = "considering_esi_process",
            but_first = "add_stale_warning"
        },
        {
            begin = "considering_revalidation",
            but_first = { "add_stale_warning" }
        },
    },
    -- We can serve stale, but also trigger a background revalidation
    can_serve_stale_while_revalidate = {
        {
            begin = "considering_esi_process",
            but_first = { "add_stale_warning", "revalidate_in_background" }
        },
    },
    -- We have a response we can use. If we've already served (we are doing
    -- background work) then just exit. If it has been prepared and we were
    -- not_modified, then set 304 and serve. If it has been prepared, set
    -- status accordingly and serve. If not, prepare it.
    response_ready = {
        {
            in_case = "served",
            begin = "exiting"
        },
        {
            in_case = "forced_cache",
            begin = "serving",
            but_first = "add_disconnected_warning"
        },
        -- If we might ESI, then don't 304 downstream.
        {
            when = "preparing_response",
            in_case = "esi_process_enabled",
            begin = "serving",
            but_first = "set_http_status_from_response"
        },
        {
            when = "preparing_response",
            in_case = "not_modified",
            after = "fetching",
            begin = "serving",
            but_first = {
                "set_http_not_modified",
                "disable_output_buffers"
            }
        },
        {
            when = "preparing_response",
            in_case = "not_modified",
            begin = "serving",
            but_first = {
                "set_http_not_modified",
                "install_no_body_reader"
            }
        },
        {
            when = "preparing_response",
            begin = "serving",
            but_first = "set_http_status_from_response"
        },
        {
            begin = "preparing_response"
        },
    },
    -- We have sent the response. If it was stale, we go back around the
    -- fetching path so that a background revalidation can occur unless the
    -- upstream errored. Otherwise exit.
    served = {
        { in_case = "upstream_error", begin = "exiting" },
        { in_case = "collapsed_forwarding_upstream_error", begin = "exiting" },
        { in_case = "response_cacheable", begin = "exiting" },
        { begin = "exiting" },
    },
    -- When the client request is aborted clean up redis / http connections.
    -- If we're saving or have the collapse lock, then don't abort as we want
    -- to finish regardless.
    -- Note: this is a special entry point, triggered by ngx_lua client abort
    -- notification.
    aborted = {
        { in_case = "response_cacheable", begin = "cancelling_abort_request" },
        {
            in_case = "obtained_collapsed_forwarding_lock",
            begin = "cancelling_abort_request"
        },
        { begin = "aborting" },
    },
    -- Useful events for exiting with a common status. If we've already served
    -- (perhaps we're doing background work), we just exit without re-setting
    -- the status (as this errors).
    http_gateway_timeout = {
        { in_case = "served", begin = "exiting" },
        { begin = "exiting", but_first = "set_http_gateway_timeout" },
    },
    http_service_unavailable = {
        { in_case = "served", begin = "exiting" },
        { begin = "exiting", but_first = "set_http_service_unavailable" },
    },
    http_internal_server_error = {
        { in_case = "served", begin = "exiting" },
        { begin = "exiting", but_first = "set_http_internal_server_error" },
    },
}
================================================
FILE: lib/ledge/state_machine/pre_transitions.lua
================================================
local _M = { -- luacheck: no unused
_VERSION = "2.3.0",
}
-- Pre-transitions. These actions will *always* be performed before
-- transitioning.
return {
-- Clean up connections whenever we leave the request flow.
exiting = { "redis_close", "httpc_close" },
-- On abort, don't pool the upstream connection - its state is unknown.
aborting = { "redis_close", "httpc_close_without_keepalive" },
checking_cache = { "read_cache" },
-- Never fetch with client validators, but put them back afterwards.
fetching = {
"remove_client_validators", "fetch", "restore_client_validators"
},
-- Need to save the error response before reading from cache in case we
-- need to serve it later
considering_stale_error = {
"stash_error_response",
"read_cache"
},
-- Restore the saved response and set the status when serving an error page
serving_upstream_error = {
"restore_error_response",
"set_http_status_from_response"
},
serving_stale = {
"set_http_status_from_response",
},
-- Stop buffering output once the client has gone away.
cancelling_abort_request = {
"disable_output_buffers"
},
}
================================================
FILE: lib/ledge/state_machine/states.lua
================================================
local ledge = require("ledge")
local esi = require("ledge.esi")
local range = require("ledge.range")
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local ngx_null = ngx.null
local ngx_PARTIAL_CONTENT = 206
local ngx_req_get_method = ngx.req.get_method
local ngx_req_get_headers = ngx.req.get_headers
local str_find = string.find
local str_lower = string.lower
local header_has_directive = require("ledge.header_util").header_has_directive
local can_revalidate_locally =
require("ledge.validation").can_revalidate_locally
local must_revalidate = require("ledge.validation").must_revalidate
local is_valid_locally = require("ledge.validation").is_valid_locally
local can_serve_stale = require("ledge.stale").can_serve_stale
local can_serve_stale_if_error = require("ledge.stale").can_serve_stale_if_error
local can_serve_stale_while_revalidate =
require("ledge.stale").can_serve_stale_while_revalidate
local req_accepts_cache = require("ledge.request").accepts_cache
local purge_mode = require("ledge.request").purge_mode
local purge = require("ledge.purge").purge
local purge_api = require("ledge.purge").purge_api
local purge_in_background = require("ledge.purge").purge_in_background
local create_purge_response = require("ledge.purge").create_purge_response
local acquire_lock = require("ledge.collapse").acquire_lock
local parse_content_range = require("ledge.range").parse_content_range
local vary_compare = require("ledge.cache_key").vary_compare
local _M = { -- luacheck: no unused
_VERSION = "2.3.0",
}
-- Decision states.
-- Represented as functions which should simply make a decision, and return
-- calling state_machine:e(ev) with the event that has occurred. Place any
-- further logic in actions triggered by the transition table.
return {
-- PURGE takes its own path; of the remainder, only GET and HEAD
-- are cacheable methods.
checking_method = function(sm)
    local method = ngx_req_get_method()

    if method == "PURGE" then
        return sm:e "purge_requested"
    elseif method == "GET" or method == "HEAD" then
        return sm:e "cacheable_method"
    else
        return sm:e "cache_not_accepted"
    end
end,
-- The JSON purge API is selected by a PURGE with a JSON Content-Type.
-- NOTE(review): this is an exact match, so "application/json; charset=..."
-- will not select the API; it also assumes a single Content-Type header
-- (a string) - multiple headers would arrive as a table. Confirm intended.
considering_purge_api = function(sm)
local ct = ngx_req_get_headers()["Content-Type"]
if ct and str_lower(ct) == "application/json" then
return sm:e "purge_api_requested"
else
return sm:e "purge_requested"
end
end,
-- A literal "*" anywhere in the root cache key signals a wildcard purge
-- (plain-text find; "*" is not treated as a pattern).
considering_wildcard_purge = function(sm, handler)
    local root_key = handler:cache_key_chain().root
    local has_wildcard = str_find(root_key, "*", 1, true) ~= nil

    if has_wildcard then
        return sm:e "wildcard_purge_requested"
    end
    return sm:e "purge_requested"
end,
checking_origin_mode = function(sm, handler)
    -- Any mode below ORIGIN_MODE_NORMAL (avoid/bypass) forces cache use,
    -- ignoring the client's cache requirements.
    local mode = handler.config.origin_mode
    if mode >= ledge.ORIGIN_MODE_NORMAL then
        return sm:e "cacheable_method"
    end
    return sm:e "forced_cache"
end,
-- Unconditionally accept cache (reached when cache use is being forced).
accept_cache = function(sm)
    return sm:e("cache_accepted")
end,
-- Defer to request.accepts_cache() (request Cache-Control inspection).
checking_request = function(sm)
    local event =
        req_accepts_cache() and "cache_accepted" or "cache_not_accepted"
    return sm:e(event)
end,
-- Classify the cache lookup result: empty response table means a miss,
-- otherwise it is either expired or still valid.
checking_cache = function(sm, handler)
    local res = handler.response

    if next(res) == nil then
        return sm:e "cache_missing"
    end

    if res:has_expired() then
        return sm:e "cache_expired"
    end

    return sm:e "cache_valid"
end,
considering_gzip_inflate = function(sm, handler)
    local res = handler.response

    -- Only gzip-encoded bodies are candidates for inflation
    if res.header["Content-Encoding"] == "gzip" then
        -- Inflate when we need plain text for ESI scanning, or when
        -- gunzipping is enabled and the client can't accept gzip.
        local accept_encoding =
            ngx_req_get_headers()["Accept-Encoding"] or ""
        local client_accepts_gzip =
            header_has_directive(accept_encoding, "gzip")

        local must_inflate = handler.esi_scan_enabled or
            (handler.config.gunzip_enabled and client_accepts_gzip == false)

        if must_inflate then
            return sm:e "gzip_inflate_enabled"
        end
    end

    return sm:e "gzip_inflate_disabled"
end,
-- Decide whether the fetched body should be scanned for ESI instructions:
-- requires ESI enabled in config, a body to scan, a processor selectable
-- from Surrogate-Control, and an allowed Content-Type.
considering_esi_scan = function(sm, handler)
if handler.config.esi_enabled == true then
local res = handler.response
-- No body (e.g. HEAD / 304) means nothing to scan
if not res.has_body then
return sm:e "esi_scan_disabled"
end
-- Choose an ESI processor from the Surrogate-Control header
-- (Currently there is only the ESI/1.0 processor)
local processor = esi.choose_esi_processor(handler)
if processor then
if esi.is_allowed_content_type(res, handler.config.esi_content_types) then
-- Store parser for processing
-- TODO: Strictly this should be installed by the state machine
handler.esi_processor = processor
return sm:e "esi_scan_enabled"
end
end
end
return sm:e "esi_scan_disabled"
end,
-- We decide to process if:
-- - We know the response has_esi (fast path)
-- - We already decided to scan for esi (slow path)
-- - We aren't delegating responsibility downstream, which would occur when both:
-- - Surrogate-Capability is set with a matching parser type and version.
-- - Delegation is enabled in configuration.
--
-- So essentially, if we think we may need to process, then we do. We don't want to
-- accidentally send ESI instructions to a client, so we only delegate if we're sure.
-- See the decision rationale in the comment block above: process when we
-- know (or suspect) the body carries ESI, unless delegation to a capable
-- downstream surrogate is both possible and enabled.
considering_esi_process = function(sm, handler)
local res = handler.response
-- If we know there's no esi or it hasn't been scanned, don't process
if not res.has_esi and res.esi_scanned == false then
return sm:e "esi_process_disabled"
end
if not next(handler.esi_processor) then
-- On the fast path with ESI already detected, the processor wont have been loaded
-- yet, so we must do that now
-- TODO: Perhaps the state machine can load the processor to avoid this weird check
if res.has_esi then
local p = esi.choose_esi_processor(handler)
if not p then
-- This shouldn't happen
-- if res.has_esi is set then a processor should be selectedable
return sm:e "esi_process_not_required"
else
handler.esi_processor = p
end
else
-- We know there's nothing to do
return sm:e "esi_process_not_required"
end
end
-- A processor is loaded; only skip processing if we may safely delegate
-- to a downstream surrogate (per Surrogate-Capability + config).
if esi.can_delegate_to_surrogate(
handler.config.esi_allow_surrogate_delegation,
handler.esi_processor.token
) then
-- Disabled due to surrogate delegation
return sm:e "esi_process_disabled"
else
return sm:e "esi_process_enabled"
end
end,
-- Evaluate any Range request header against the response.
-- handle_range_request returns a tri-state second value: true (range ok),
-- false (range requested but unsatisfiable), nil (no range requested).
-- NOTE(review): the local `range` below shadows the module-level `range`
-- upvalue (require("ledge.range")) for the rest of this function.
checking_range_request = function(sm, handler)
local res = handler.response
-- TODO this should just check, not install range?
local range = range.new()
local res, partial_response = range:handle_range_request(res)
handler.range = range
handler.response = res
if partial_response then
return sm:e "range_accepted"
elseif partial_response == false then
return sm:e "range_not_accepted"
else
return sm:e "range_not_requested"
end
end,
-- Decide whether an upstream fetch is permitted at all, and whether it
-- should go through collapsed forwarding.
checking_can_fetch = function(sm, handler)
    -- Bypass mode never goes to the origin
    if handler.config.origin_mode == ledge.ORIGIN_MODE_BYPASS then
        return sm:e "http_service_unavailable"
    end
    -- only-if-cached (RFC 7234) forbids going upstream
    local req_cc = ngx_req_get_headers()["Cache-Control"]
    if header_has_directive(req_cc, "only-if-cached") then
        return sm:e "http_gateway_timeout"
    end
    if handler.config.enable_collapsed_forwarding then
        return sm:e "can_fetch_but_try_collapse"
    end
    return sm:e "can_fetch"
end,
-- Attempt to become the collapsed-forwarding "leader" for this cache
-- key by acquiring a short-lived lock; losers subscribe to a channel
-- and wait for the leader to publish the outcome.
requesting_collapse_lock = function(sm, handler)
    local timeout = tonumber(handler.config.collapsed_forwarding_window)
    if not timeout then
        ngx_log(ngx_ERR, "collapsed_forwarding_window must be a number")
        return sm:e "collapsed_forwarding_failed"
    end
    local redis = handler.redis
    local key_chain = handler:cache_key_chain()
    local lock_key = key_chain.fetching_lock
    -- nil res means the lock script itself errored; false means the
    -- lock is already held by another request
    local res, err = acquire_lock(redis, lock_key, timeout)
    if res == nil then -- Lua script failed
        if err then ngx_log(ngx_ERR, err) end
        return sm:e "collapsed_forwarding_failed"
    elseif res then -- We have the lock
        return sm:e "obtained_collapsed_forwarding_lock"
    else
        -- We didn't get the lock, try to collapse
        -- Create a new Redis connection and put it into subscribe mode
        -- Then check if the lock still exists as it may have been freed
        -- in the time between attempting to acquire and subscribing.
        -- In which case we have missed the publish event
        local redis_subscriber = ledge.create_redis_connection()
        local ok, err = redis_subscriber:subscribe(lock_key)
        if not ok or ok == ngx_null then
            -- Failed to enter subscribe mode
            if err then ngx_log(ngx_ERR, err) end
            return sm:e "collapsed_forwarding_failed"
        end
        -- Checked on the original connection: the subscriber connection
        -- is restricted to (un)subscribe commands while in pubsub mode
        local ok, err = redis:exists(lock_key)
        if ok == 1 then
            -- We subscribed before the lock was freed
            handler.redis_subscriber = redis_subscriber
            return sm:e "subscribed_to_collapsed_forwarding_channel"
        elseif ok == 0 then
            -- Lock was freed before we subscribed
            return sm:e "collapsed_forwarding_channel_closed"
        else
            -- Error checking lock still exists
            if err then ngx_log(ngx_ERR, err) end
            return sm:e "collapsed_forwarding_failed"
        end
    end
end,
-- The four publishing states share a pattern: delete the collapse lock
-- key (releasing it) and publish the outcome to any subscribed peers.
publishing_collapse_success = function(sm, handler)
    local channel = handler._publish_key
    local redis = handler.redis
    redis:del(channel) -- Clear the lock
    redis:publish(channel, "collapsed_response_ready")
    return sm:e "published"
end,
publishing_collapse_failure = function(sm, handler)
    local channel = handler._publish_key
    local redis = handler.redis
    redis:del(channel) -- Clear the lock
    redis:publish(channel, "collapsed_forwarding_failed")
    return sm:e "published"
end,
publishing_collapse_upstream_error = function(sm, handler)
    local channel = handler._publish_key
    local redis = handler.redis
    redis:del(channel) -- Clear the lock
    redis:publish(channel, "collapsed_forwarding_upstream_error")
    return sm:e "published"
end,
publishing_collapse_vary_modified = function(sm, handler)
    local channel = handler._publish_key
    local redis = handler.redis
    redis:del(channel) -- Clear the lock
    redis:publish(channel, "collapsed_forwarding_vary_modified")
    return sm:e "published"
end,
-- Stash the lock key before fetching: a Vary response may change the
-- cache key chain, but we must publish on the original channel.
fetching_as_surrogate = function(sm, handler)
    handler._publish_key = handler:cache_key_chain().fetching_lock
    return sm:e "can_fetch"
end,
-- Compare the upstream Vary spec against the cached one and update the
-- vary spec (and hence the cache key) if it changed.
considering_vary = function(sm, handler)
    local upstream_spec = handler.response:parse_vary_header()
    local cached_spec = handler:cache_key_chain().vary_spec
    if vary_compare(upstream_spec, cached_spec) == false then
        handler:set_vary_spec(upstream_spec)
        return sm:e "vary_modified"
    end
    return sm:e "vary_unmodified"
end,
-- Block on the subscriber connection until the collapse leader
-- publishes the outcome (or until the collapse window elapses), then
-- map the published message onto an event.
waiting_on_collapsed_forwarding_channel = function(sm, handler)
    local redis = handler.redis_subscriber
    -- Extend the timeout to the size of the window
    redis:set_timeout(handler.config.collapsed_forwarding_window)
    local res, _ = redis:read_reply() -- block until we hear something or timeout
    if not res or res == ngx_null then
        return sm:e "collapsed_forwarding_failed"
    else
        -- TODO this config is now in the singleton
        -- NOTE(review): set_timeout takes milliseconds, so 60 here looks
        -- like it was meant to restore redis_read_timeout — confirm
        redis:set_timeout(60) --handler.config.redis_read_timeout)
        redis:unsubscribe()
        ledge.close_redis_connection(redis)
        -- This is overly explicit for the sake of state machine introspection. That is
        -- we never call sm:e() without a literal event string.
        -- res[3] is the message payload of the pubsub reply
        if res[3] == "collapsed_response_ready" then
            return sm:e "collapsed_response_ready"
        elseif res[3] == "collapsed_forwarding_upstream_error" then
            return sm:e "collapsed_forwarding_upstream_error"
        elseif res[3] == "collapsed_forwarding_vary_modified" then
            return sm:e "collapsed_forwarding_vary_modified"
        else
            return sm:e "collapsed_forwarding_failed"
        end
    end
end,
-- Map the upstream response status onto a state machine event.
fetching = function(sm, handler)
    local status = handler.response.status
    if status >= 500 then
        return sm:e "upstream_error"
    elseif status == ngx.HTTP_NOT_MODIFIED then
        return sm:e "response_ready"
    elseif status == ngx_PARTIAL_CONTENT then
        return sm:e "partial_response_fetched"
    end
    return sm:e "response_fetched"
end,
-- After a partial (206) response, consider fetching the full resource
-- in the background — but only if Content-Range advertises a total size
-- that fits within the storage driver's max_size.
considering_background_fetch = function(sm, handler)
    local res = handler.response
    if res.status == ngx_PARTIAL_CONTENT then
        local content_range = res.header["Content-Range"]
        if content_range then
            local _, _, size = parse_content_range(content_range)
            if size then
                local max_size = handler.storage:get_max_size()
                if type(max_size) == "number" and max_size > size then
                    return sm:e "can_fetch_in_background"
                end
            end
        end
    end
    -- Not partial (shouldn't happen), unknown size, or too large
    return sm:e "background_fetch_skipped"
end,
-- Delegate to the purge API handler.
purging_via_api = function(sm, handler)
    if purge_api(handler) then
        return sm:e "purge_api_completed"
    end
    return sm:e "purge_api_failed"
end,
-- Purge the current representation set, attaching a JSON summary of the
-- outcome as the response body.
purging = function(sm, handler)
    local mode = purge_mode()
    local repset = handler:cache_key_chain().repset
    local ok, message, job = purge(handler, mode, repset)
    handler.response:set_body(create_purge_response(mode, message, job))
    if ok then
        return sm:e "purged"
    end
    return sm:e "nothing_to_purge"
end,
-- Wildcard purges walk the keyspace, so they are queued as a background
-- job rather than run inline.
wildcard_purging = function(sm, handler)
    purge_in_background(handler, purge_mode())
    return sm:e "wildcard_purge_scheduled"
end,
-- If stale-if-error applies, serve from cache; otherwise pass the
-- upstream error through.
considering_stale_error = function(sm, handler)
    if can_serve_stale_if_error(handler.response) then
        return sm:e "can_serve_disconnected"
    end
    return sm:e "can_serve_upstream_error"
end,
-- Serve the upstream error response as-is.
serving_upstream_error = function(sm, handler)
    handler:serve()
    return sm:e "served"
end,
-- Decide whether the cached response must be revalidated upstream, can
-- be validated against the request's conditional headers, or carries no
-- validators at all.
considering_revalidation = function(sm, handler)
    if must_revalidate(handler.response) then
        return sm:e "must_revalidate"
    end
    if can_revalidate_locally() then
        return sm:e "can_revalidate_locally"
    end
    return sm:e "no_validator_present"
end,
-- As above, but when only local (conditional header) validation applies.
considering_local_revalidation = function(sm)
    if can_revalidate_locally() then
        return sm:e "can_revalidate_locally"
    end
    return sm:e "no_validator_present"
end,
-- Compare conditional request headers against the cached response.
revalidating_locally = function(sm, handler)
    if is_valid_locally(handler.response) then
        return sm:e "not_modified"
    end
    return sm:e "modified"
end,
-- Decide whether an expired representation may still be served: always
-- when avoiding/bypassing the origin, otherwise according to the
-- stale-serving checks (stale-while-revalidate / max-stale).
checking_can_serve_stale = function(sm, handler)
    local res = handler.response
    if handler.config.origin_mode < ledge.ORIGIN_MODE_NORMAL then
        return sm:e "can_serve_stale"
    end
    if can_serve_stale_while_revalidate(res) then
        return sm:e "can_serve_stale_while_revalidate"
    end
    if can_serve_stale(res) then
        return sm:e "can_serve_stale"
    end
    return sm:e "cache_expired"
end,
-- Decide whether the fetched response should be written to cache.
updating_cache = function(sm, handler)
    local res = handler.response
    if not res.has_body then
        return sm:e "response_body_missing"
    end
    if res:is_cacheable() then
        return sm:e "response_cacheable"
    end
    return sm:e "response_not_cacheable"
end,
-- Response is fully prepared; signal readiness to serve.
preparing_response = function(sm)
    return sm:e "response_ready"
end,
-- Send the prepared response to the client.
serving = function(sm, handler)
    handler:serve()
    return sm:e "served"
end,
-- Send a stale response to the client (entered via the stale states).
serving_stale = function(sm, handler)
    handler:serve()
    return sm:e "served"
end,
-- Terminal state: finalise the request with the current status.
exiting = function()
    ngx.exit(ngx.status)
end,
-- Returning true declines the abort; presumably consumed by an
-- ngx.on_abort handler elsewhere — confirm against the handler module.
cancelling_abort_request = function()
    return true
end,
-- Terminal state for aborted requests.
aborting = function()
    ngx.exit(ngx.status)
end,
}
================================================
FILE: lib/ledge/state_machine.lua
================================================
local events = require("ledge.state_machine.events")
local pre_transitions = require("ledge.state_machine.pre_transitions")
local states = require("ledge.state_machine.states")
local actions = require("ledge.state_machine.actions")
local ngx_log = ngx.log
local ngx_DEBUG = ngx.DEBUG
local ngx_ERR = ngx.ERR
local get_fixed_field_metatable_proxy =
require("ledge.util").mt.get_fixed_field_metatable_proxy
local _DEBUG = false
local _M = {
_VERSION = "2.3.0",
set_debug = function(debug) _DEBUG = debug end,
}
-- Create a state machine instance bound to the given handler.
-- History tables record which states/events have been seen, for the
-- "after" / "in_case" transition guards.
local function new(handler)
    local sm = {
        handler = handler,
        state_history = {},
        event_history = {},
        current_state = "",
    }
    return setmetatable(sm, get_fixed_field_metatable_proxy(_M))
end
_M.new = new
-- Transition to a new state.
-- Runs any registered pre-transition actions (failures are logged but
-- do not abort the transition), records the state in history, then
-- invokes the state function itself.
local function t(self, state)
    -- Check for any transition pre-tasks
    local pre_t = pre_transitions[state]
    if pre_t then
        for _,action in ipairs(pre_t) do
            if _DEBUG then ngx_log(ngx_DEBUG, "#a: ", action) end
            -- pcall so a broken action cannot take down the request
            local ok, err = pcall(actions[action], self.handler)
            if not ok then
                ngx_log(ngx_ERR, "state '", state, "' failed to call action '", action, "': ", tostring(err))
            end
        end
    end
    if _DEBUG then ngx_log(ngx_DEBUG, "#t: ", state) end
    self.state_history[state] = true
    self.current_state = state
    return states[state](self, self.handler)
end
_M.t = t
-- Process state transitions and actions based on the event fired.
-- Each event maps to a list of candidate transitions; the first one
-- whose "when" / "after" / "in_case" guards match the current state and
-- history is taken, running any "but_first" actions on the way.
local function e(self, event)
    if _DEBUG then ngx_log(ngx_DEBUG, "#e: ", event) end
    self.event_history[event] = true
    -- It's possible for states to call undefined events at run time.
    if not events[event] then
        ngx_log(ngx.CRIT, event, " is not defined.")
        ngx.status = ngx.HTTP_INTERNAL_SERVER_ERROR
        -- Previously fell through to ipairs(events[event]) on nil below,
        -- raising a secondary error; return here after deciding to exit.
        return self:t("exiting")
    end
    for _, trans in ipairs(events[event]) do
        local t_when = trans["when"]
        if t_when == nil or t_when == self.current_state then
            local t_after = trans["after"]
            if not t_after or self.state_history[t_after] then
                local t_in_case = trans["in_case"]
                if not t_in_case or self.event_history[t_in_case] then
                    local t_but_first = trans["but_first"]
                    if t_but_first then
                        if type(t_but_first) == "table" then
                            for _, action in ipairs(t_but_first) do
                                if _DEBUG then
                                    ngx_log(ngx_DEBUG, "#a: ", action)
                                end
                                actions[action](self.handler)
                            end
                        else
                            if _DEBUG then
                                ngx_log(ngx_DEBUG, "#a: ", t_but_first)
                            end
                            actions[t_but_first](self.handler)
                        end
                    end
                    return self:t(trans["begin"])
                end
            end
        end
    end
end
_M.e = e
return _M
================================================
FILE: lib/ledge/storage/redis.lua
================================================
local redis_connector = require "resty.redis.connector"
local tostring, pairs, next, unpack, setmetatable =
tostring, pairs, next, unpack, setmetatable
local ngx_null = ngx.null
local ngx_log = ngx.log
local ngx_ERR = ngx.ERR
local ngx_WARN = ngx.WARN
local tbl_insert = table.insert
local tbl_copy_merge_defaults = require("ledge.util").table.copy_merge_defaults
local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable
local get_fixed_field_metatable_proxy =
require("ledge.util").mt.get_fixed_field_metatable_proxy
local _M = {
    _VERSION = "2.3.0",
}
-- Default parameters, merged with user params in connect()
local defaults = setmetatable({
    redis_connector_params = {},
    max_size = 1024 * 1024, -- Max storable size, in bytes
    -- Optional atomicity
    -- e.g. for use with a Redis proxy which doesn't support transactions
    supports_transactions = true,
}, fixed_field_metatable)
-- Redis key namespace
local KEY_PREFIX = "ledge:entity:"
-- Returns the Redis keys for entity_id, or nil when no ID is given.
-- Both keys are lists of chunks; the braces presumably form a Redis
-- Cluster hash tag so both land on the same slot — confirm if clustering
-- is a supported deployment.
local function entity_keys(entity_id)
    if not entity_id then return nil end
    local prefix = KEY_PREFIX .. "{" .. entity_id .. "}"
    return {
        body = prefix .. ":body",
        body_esi = prefix .. ":body_esi",
    }
end
-- Creates a new (disconnected) storage instance
--
-- @return table The module instance
function _M.new()
    return setmetatable({
        redis = {}, -- replaced with a live connection by connect()
        params = {}, -- replaced with merged params by connect()
        _reader_cursor = 0, -- next chunk index for get_reader()
        _keys_created = false, -- set once write_chunk() has set TTLs
    }, get_fixed_field_metatable_proxy(_M))
end
-- Connects to the Redis storage backend
--
-- @param table Module instance (self)
-- @param table Storage params (merged over module defaults)
-- @return self on success, or nil and an error string
function _M.connect(self, user_params)
    -- Take user_params by value and merge with defaults
    local params = tbl_copy_merge_defaults(user_params, defaults)
    self.params = params
    local rc, err = redis_connector.new(params.redis_connector_params)
    if not rc then
        return nil, err
    end
    local redis, connect_err = rc:connect()
    if not redis then
        return nil, connect_err
    end
    self.redis = redis
    return self, nil
end
-- Closes the Redis connection (placing it back on the keepalive pool)
-- and resets per-request read/write state.
--
-- @param table Module instance (self)
-- @return keepalive result, or nil and an error string, or nil when
--   there was no live connection to close
function _M.close(self)
    self._reader_cursor = 0
    self._keys_created = false
    local redis = self.redis
    -- self.redis is initialised to {} in new(), so a plain truthiness
    -- check always passed and an empty table could be handed to the
    -- connector. Only attempt keepalive for a real connection, mirroring
    -- ledge.close_redis_connection().
    if redis and next(redis) then
        local rc, err = redis_connector.new(
            self.params.redis_connector_params
        )
        if not rc then
            return nil, err
        end
        return rc:set_keepalive(redis)
    end
end
-- Returns the maximum size this connection is prepared to store.
--
-- @param table Module instance (self)
-- @return number Size (bytes); nil if connect() has not run yet, since
--   self.params starts as an empty table in new()
function _M.get_max_size(self)
    return self.params.max_size
end
-- Returns a boolean indicating if the entity exists.
-- Both the body and body_esi lists must be present to count as existing.
--
-- @param table Module instance (self)
-- @param string The entity ID
-- @return boolean (exists)
-- @return string err (or nil)
function _M.exists(self, entity_id)
    local keys = entity_keys(entity_id)
    if not keys then
        return nil, "no entity id"
    end
    local redis = self.redis
    redis:init_pipeline(2)
    redis:exists(keys.body)
    redis:exists(keys.body_esi)
    local replies, err = redis:commit_pipeline()
    if not replies and err then
        return nil, err
    end
    if replies == ngx_null or #replies < 2 then
        return nil, "expected 2 pipelined command results"
    end
    return replies[1] == 1 and replies[2] == 1
end
-- Deletes an entity
--
-- @param table Module instance (self)
-- @param string The entity ID
-- @return boolean success (false when nothing existed to delete);
--   nothing when no entity ID was given
-- @return string err (or nil)
function _M.delete(self, entity_id)
    local key_chain = entity_keys(entity_id)
    if not key_chain then return end
    local keys = {}
    for _, key in pairs(key_chain) do
        keys[#keys + 1] = key
    end
    local res, err = self.redis:del(unpack(keys))
    if res == 0 and not err then
        -- Nothing existed to delete
        return false, nil
    end
    return res, err
end
-- Sets the time-to-live for an entity
--
-- @param table Module instance (self)
-- @param string The entity ID
-- @param number TTL (seconds)
-- @return boolean success
-- @return string err (or nil)
function _M.set_ttl(self, entity_id, ttl)
    local key_chain = entity_keys(entity_id)
    if key_chain then
        local res, err
        for _, key in pairs(key_chain) do
            res, err = self.redis:expire(key, ttl)
            -- Previously only the last key's result was inspected, so a
            -- Redis error on an earlier key was silently swallowed.
            if not res then
                return res, err
            end
        end
        if res == 0 then
            -- EXPIRE returns 0 when the key does not exist
            return false, "entity does not exist"
        else
            return true, nil
        end
    end
end
-- Gets the time-to-live for an entity
--
-- @param table Module instance (self)
-- @param string The entity ID
-- @return number ttl, or false when missing / persistent, or nil on error;
--   nothing when no entity ID was given
-- @return string err (or nil)
function _M.get_ttl(self, entity_id)
    local key_chain = entity_keys(entity_id)
    -- entity_keys() returns nil (not an empty table) without an ID, so
    -- the previous `next(key_chain)` raised on a nil entity_id; guard
    -- like delete() and set_ttl() do.
    if key_chain then
        local res, err = self.redis:ttl(key_chain.body)
        if not res then
            return res, err
        elseif res == -2 then
            -- TTL returns -2 when the key does not exist
            return false, "entity does not exist"
        elseif res == -1 then
            -- and -1 when it exists but has no expiry
            return false, "entity does not have a ttl"
        else
            return res, nil
        end
    end
end
-- Returns an iterator for reading the body chunks.
--
-- @param table Module instance (self)
-- @param table Response object
-- @return function Iterator, returning chunk, err, has_esi for each call
function _M.get_reader(self, res)
    local redis = self.redis
    local entity_id = res.entity_id
    local entity_keys = entity_keys(entity_id)
    -- Snapshot the chunk count up front; the cursor lives on self, so a
    -- given instance's reader can only be consumed once
    local num_chunks = redis:llen(entity_keys.body) or 0
    return function()
        local cursor = self._reader_cursor
        self._reader_cursor = cursor + 1
        if cursor < num_chunks then
            local chunk, err = redis:lindex(entity_keys.body, cursor)
            if not chunk then return nil, err, nil end
            local has_esi, err = redis:lindex(entity_keys.body_esi, cursor)
            if not has_esi then return nil, err, nil end
            -- ngx_null means the list was removed mid-read (e.g. evicted);
            -- yield nil to end the stream early
            if chunk == ngx_null or has_esi == ngx_null then
                ngx_log(ngx_WARN,
                    "entity removed during read, ",
                    entity_keys.body
                )
                chunk = nil
            end
            -- has_esi is stored as the strings "true"/"false"
            return chunk, nil, has_esi == "true"
        end
    end
end
-- Writes a given chunk.
-- Pushes the chunk and its has_esi flag onto parallel Redis lists, and
-- sets the TTL on both lists the first time anything is written.
--
-- @return true on success, or nil and an error string
local function write_chunk(self, entity_keys, chunk, has_esi, ttl)
    local redis = self.redis
    -- Write chunks / has_esi onto lists
    local ok, e = redis:rpush(entity_keys.body, chunk)
    if not ok then return nil, e end
    ok, e = redis:rpush(entity_keys.body_esi, tostring(has_esi))
    if not ok then return nil, e end
    -- If this is the first write, set expiration too
    if not self._keys_created then
        self._keys_created = true
        ok, e = redis:expire(entity_keys.body, ttl)
        if not ok then return nil, e end
        ok, e = redis:expire(entity_keys.body_esi, ttl)
        if not ok then return nil, e end
    end
    return true, nil
end
-- Returns an iterator which writes chunks to cache as they are read from
-- the reader belonging to the response object.
-- If we cross the max_size boundary, or error for any reason, we just
-- keep yielding chunks to be served, after having removed the cache entry.
--
-- @param table Module instance (self)
-- @param table Response object
-- @param number time-to-live
-- @param function onsuccess callback
-- @param function onfailure callback
-- @return function Iterator, returning chunk, err, has_esi for each call
function _M.get_writer(self, res, ttl, onsuccess, onfailure)
    local redis = self.redis
    local max_size = self.params.max_size
    local supports_transactions = self.params.supports_transactions
    local entity_id = res.entity_id
    local entity_keys = entity_keys(entity_id)
    local failed = false
    local failed_reason = ""
    local transaction_open = false
    local size = 0 -- running body size in bytes
    local reader = res.body_reader
    return function(buffer_size)
        -- Lazily open a MULTI on first call so all writes commit (or
        -- discard) atomically where the backend supports it
        if not transaction_open and supports_transactions then
            redis:multi()
            transaction_open = true
        end
        local chunk, err, has_esi = reader(buffer_size)
        if not chunk and err then
            failed = true
            failed_reason = "upstream error: " .. err
        end
        if chunk and not failed then -- We have something to write
            size = size + #chunk
            if max_size and size > max_size then
                failed = true
                failed_reason = "body is larger than " .. max_size .. " bytes"
            else
                local ok, e = write_chunk(self,
                    entity_keys,
                    chunk,
                    has_esi,
                    ttl
                )
                if not ok then
                    failed = true
                    failed_reason = "error writing: " .. tostring(e)
                end
            end
        elseif not chunk and not failed then -- We're finished
            if supports_transactions then
                local ok, e = redis:exec() -- Commit
                if not ok or ok == ngx_null then
                    -- Transaction failed
                    -- pcall: user callbacks must not break serving
                    ok, e = pcall(onfailure, e)
                    if not ok then ngx_log(ngx_ERR, e) end
                end
            end
            -- All good, report success
            -- NOTE(review): onsuccess also runs when the exec above
            -- failed and onfailure was already invoked — confirm intended
            local ok, e = pcall(onsuccess, size)
            if not ok then ngx_log(ngx_ERR, e) end
        elseif not chunk and failed then -- We're finished, but failed
            if supports_transactions then
                redis:discard() -- Rollback
            else
                -- Attempt to clean up manually (connection could be dead)
                local ok, e = redis:del(
                    entity_keys.body,
                    entity_keys.body_esi
                )
                if not ok or ok == ngx_null then ngx_log(ngx_ERR, e) end
            end
            local ok, e = pcall(onfailure, failed_reason)
            if not ok then ngx_log(ngx_ERR, e) end
        end
        -- Always bubble up, so the client is still served on failure
        return chunk, err, has_esi
    end
end
return _M
================================================
FILE: lib/ledge/util.lua
================================================
local ngx_var = ngx.var
local ffi = require "ffi"
local type, next, setmetatable, getmetatable, error, tostring =
type, next, setmetatable, getmetatable, error, tostring
local str_find = string.find
local str_sub = string.sub
local co_create = coroutine.create
local co_status = coroutine.status
local co_resume = coroutine.resume
local math_floor = math.floor
local ffi_cdef = ffi.cdef
local ffi_new = ffi.new
local ffi_string = ffi.string
local C = ffi.C
-- FFI declarations for random bytes (OpenSSL) and hex dumping (nginx
-- core). Wrapped in pcall — presumably to tolerate repeated cdefs when
-- the module is loaded more than once; failures are logged and ignored.
local ok, err = pcall(ffi_cdef, [[
typedef unsigned char u_char;
u_char * ngx_hex_dump(u_char *dst, const u_char *src, size_t len);
int RAND_pseudo_bytes(u_char *buf, int num);
]])
if not ok then ngx.log(ngx.ERR, err) end
local ok, err = pcall(ffi_cdef, [[
int gethostname (char *name, size_t size);
]])
if not ok then ngx.log(ngx.ERR, err) end
-- Public module table, namespaced by category (string/table/mt/coroutine)
local _M = {
    _VERSION = "2.3.0",
    string = {},
    table = {},
    mt = {},
    coroutine = {},
}
-- Returns `len` hex characters of pseudo-random data (odd lengths are
-- rounded down to an even number of characters by the integer division).
--
-- @param number len desired hex string length
-- @return string hex, or nil and an error string
local function randomhex(len)
    local byte_len = math_floor(len / 2)
    local bytes = ffi_new("uint8_t[?]", byte_len)
    -- RAND_pseudo_bytes returns -1 when unsupported, 0 when the output
    -- may be weak, 1 on success. The previous check tested `bytes`,
    -- which ffi_new never returns as nil, so failures went undetected.
    if C.RAND_pseudo_bytes(bytes, byte_len) == -1 then
        return nil, "error getting random bytes via FFI"
    end
    local hex = ffi_new("uint8_t[?]", byte_len * 2)
    C.ngx_hex_dump(hex, bytes, byte_len)
    return ffi_string(hex, byte_len * 2)
end
_M.string.randomhex = randomhex
-- Split `str` on every plain-text (non-pattern) occurrence of `delim`.
-- Leading empty segments are kept; a trailing empty segment (delimiter
-- at end of string) is dropped, matching the original behaviour.
local function str_split(str, delim)
    local segments = {}
    local count = 0
    local from = 0
    while true do
        local s, e = str_find(str, delim, from, true)
        count = count + 1
        if not s then
            if from <= #str then
                segments[count] = str_sub(str, from, -1)
            end
            break
        end
        segments[count] = str_sub(str, from, s - 1)
        from = e + 1
    end
    return segments
end
_M.string.split = str_split
-- A metatable which prevents undefined fields from being created / accessed
-- (error level 3 reports the access site rather than this helper)
local fixed_field_metatable = {
    __index =
        function(t, k) -- luacheck: no unused
            error("field " .. tostring(k) .. " does not exist", 3)
        end,
    __newindex =
        function(t, k, v) -- luacheck: no unused
            error("attempt to create new field " .. tostring(k), 3)
        end,
}
_M.mt.fixed_field_metatable = fixed_field_metatable
-- Returns a metatable with fixed fields (as above), which when applied to a
-- table will provide default values via the provided `proxy`. E.g:
--
--  defaults = { a = 1, b = 2, c = 3 }
--  t = setmetatable({ b = 4 }, get_fixed_field_metatable_proxy(defaults))
--
-- `t` now gives: { a = 1, b = 4, c = 3 }
--
-- NOTE(review): both handlers test `proxy[k]` for truthiness, so a proxy
-- entry whose default is `false` behaves like a missing field (reads
-- error, writes are rejected) — confirm this is intended.
--
-- @param table proxy table
-- @return table metatable
local function get_fixed_field_metatable_proxy(proxy)
    return {
        __index =
            function(t, k) -- luacheck: no unused
                return proxy[k] or
                    error("field " .. tostring(k) .. " does not exist", 2)
            end,
        __newindex =
            function(t, k, v)
                if proxy[k] then
                    return rawset(t, k, v)
                else
                    error("attempt to create new field " .. tostring(k), 2)
                end
            end,
    }
end
_M.mt.get_fixed_field_metatable_proxy = get_fixed_field_metatable_proxy
-- Returns a metatable with fixed fields (as above), which when invoked as a
-- function will call the supplied `func`. E.g.:
--
--  t = setmetatable(
--      { a = 1, b = 2, c = 3 },
--      get_callable_fixed_field_metatable(
--          function(t, field)
--              print(t[field])
--          end
--      )
--  )
--  t("a") -- 1
--  t("b") -- 2
--
-- @param function
-- @return table callable metatable
local function get_callable_fixed_field_metatable(func)
    -- Build a fresh table rather than mutating the shared
    -- fixed_field_metatable: the previous implementation set __call on
    -- the module-level table, so every caller shared (and each new call
    -- clobbered) the same __call handler.
    return {
        __index = fixed_field_metatable.__index,
        __newindex = fixed_field_metatable.__newindex,
        __call = func,
    }
end
_M.mt.get_callable_fixed_field_metatable = get_callable_fixed_field_metatable
-- Returns a new table, recursively copied from the one given. Keys,
-- values and the metatable are all deep-copied; non-table values are
-- returned as-is.
--
-- @param table table to be copied
-- @return table
local function tbl_copy(orig)
    if type(orig) ~= "table" then
        -- number, string, boolean, etc: copy by value
        return orig
    end
    local dup = {}
    for k, v in next, orig do
        dup[tbl_copy(k)] = tbl_copy(v)
    end
    return setmetatable(dup, tbl_copy(getmetatable(orig)))
end
_M.table.copy = tbl_copy
-- Returns a new table, recursively copied from the combination of the given
-- table `t1`, with any missing fields copied from `defaults`.
--
-- If `defaults` is of type "fixed field" and `t1` contains a field name not
-- present in the defaults, an error will be thrown (raised by the
-- defaults table's own metamethods during indexing).
--
-- @param table t1
-- @param table defaults
-- @return table a new table, recursively copied and merged
local function tbl_copy_merge_defaults(t1, defaults)
    t1 = t1 == nil and {} or t1
    defaults = defaults == nil and {} or defaults
    if type(t1) ~= "table" or type(defaults) ~= "table" then
        return t1 -- not a table
    end
    local merged = {}
    -- User-supplied values win, but are themselves merged recursively
    -- against the corresponding default
    for key, value in next, t1 do
        merged[tbl_copy(key)] = tbl_copy_merge_defaults(
            value, tbl_copy(defaults[key])
        )
    end
    -- Then fill in any defaults the user did not supply
    for key, value in next, defaults do
        if t1[key] == nil then
            merged[tbl_copy(key)] = tbl_copy(value)
        end
    end
    return merged
end
_M.table.copy_merge_defaults = tbl_copy_merge_defaults
-- Wraps `func` in a coroutine, like coroutine.wrap(), but the returned
-- function yields (nil, err) instead of propagating errors, and refuses
-- to resume a dead or running coroutine.
-- NOTE(review): only the first three values yielded/returned by the
-- coroutine are passed through.
local function co_wrap(func)
    local co = co_create(func)
    if not co then
        return nil, "could not create coroutine"
    else
        return function(...)
            if co_status(co) == "suspended" then
                -- Handle errors in coroutines
                local ok, val1, val2, val3 = co_resume(co, ...)
                if ok == true then
                    return val1, val2, val3
                else
                    -- val1 holds the error from the failed resume
                    return nil, val1
                end
            else
                return nil, "can't resume a " .. co_status(co) .. " coroutine"
            end
        end
    end
end
_M.coroutine.wrap = co_wrap
-- Returns the OS hostname via gethostname(2).
-- NOTE(review): ffi_string() reads up to the first NUL; POSIX does not
-- guarantee NUL-termination when the name is truncated at the buffer
-- size (255) — confirm acceptable for the target platforms.
local function get_hostname()
    local name = ffi_new("char[?]", 255)
    C.gethostname(name, 255)
    return ffi_string(name)
end
_M.get_hostname = get_hostname
-- Appends the current server port to `name`, unless it is one of the
-- default HTTP(S) ports.
-- TODO: compare with scheme?
local function append_server_port(name)
    local port = ngx_var.server_port
    if port == "80" or port == "443" then
        return name
    end
    return name .. ":" .. port
end
_M.append_server_port = append_server_port
return _M
================================================
FILE: lib/ledge/validation.lua
================================================
local ngx_req_get_headers = ngx.req.get_headers
local ngx_req_set_header = ngx.req.set_header
local ngx_parse_http_time = ngx.parse_http_time
local get_numeric_header_token =
require("ledge.header_util").get_numeric_header_token
local header_has_directive = require("ledge.header_util").header_has_directive
local _M = {
_VERSION = "2.3.0",
}
-- True if the request or response (res) demand revalidation.
local function must_revalidate(res)
    local req_cc = ngx_req_get_headers()["Cache-Control"]
    local req_max_age = get_numeric_header_token(req_cc, "max-age")
    -- A request max-age of exactly 0 always forces revalidation
    if req_max_age == 0 then
        return true
    end
    local res_cc = res.header["Cache-Control"]
    if header_has_directive(res_cc, "(must|proxy)-revalidate") then
        return true
    end
    -- Response older than the request is prepared to accept
    local res_age = tonumber(res.header["Age"])
    if req_max_age and res_age and req_max_age < res_age then
        return true
    end
    return false
end
_M.must_revalidate = must_revalidate
-- True if the request contains valid conditional headers
-- (a parseable If-Modified-Since or a non-empty If-None-Match).
local function can_revalidate_locally()
    local req_h = ngx_req_get_headers()
    local req_ims = req_h["If-Modified-Since"]
    if req_ims then
        if ngx_parse_http_time(req_ims) then
            return true
        end
        -- Bad IMS HTTP datestamp; strip it so it isn't used downstream
        ngx_req_set_header("If-Modified-Since", nil)
    end
    local req_inm = req_h["If-None-Match"]
    if req_inm and req_inm ~= "" then
        return true
    end
    return false
end
_M.can_revalidate_locally = can_revalidate_locally
-- True if the request conditions indicate that the response (res) can
-- be served without modification.
local function is_valid_locally(res)
    local req_h = ngx_req_get_headers()
    -- Last-Modified vs If-Modified-Since
    local res_lm = res.header["Last-Modified"]
    local req_ims = req_h["If-Modified-Since"]
    if res_lm and req_ims then
        local lm_time = ngx_parse_http_time(res_lm)
        local ims_time = ngx_parse_http_time(req_ims)
        if lm_time and ims_time and lm_time <= ims_time then
            return true
        end
    end
    -- Etag vs If-None-Match (exact string comparison; weak validators
    -- are not treated specially)
    local res_etag = res.header["Etag"]
    local req_inm = req_h["If-None-Match"]
    if res_etag and req_inm and res_etag == req_inm then
        return true
    end
    return false
end
_M.is_valid_locally = is_valid_locally
return _M
================================================
FILE: lib/ledge/worker.lua
================================================
local setmetatable = setmetatable
local co_yield = coroutine.yield
local ngx_get_phase = ngx.get_phase
local tbl_copy_merge_defaults = require("ledge.util").table.copy_merge_defaults
local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable
local _M = {
_VERSION = "2.3.0",
}
-- Default worker configuration; fixed-field so typos in user-supplied
-- config raise when merged.
local defaults = setmetatable({
    interval = 1, -- shared polling interval for all queue workers
    gc_queue_concurrency = 1,
    purge_queue_concurrency = 1,
    revalidate_queue_concurrency = 1,
}, fixed_field_metatable)
-- Creates a worker instance; must be called from init_worker.
local function new(config)
    assert(ngx_get_phase() == "init_worker",
        "attempt to create ledge worker outside of the init_worker phase")
    -- Take config by value and merge with defaults
    local merged = tbl_copy_merge_defaults(config, defaults)
    return setmetatable({ config = merged }, { __index = _M })
end
_M.new = new
-- Starts the qless background workers (gc, purge, revalidate), one per
-- queue, sharing the configured interval with per-queue concurrency.
-- Must be called from init_worker; start failures raise via assert.
--
-- @param table Module instance (self)
-- @return boolean true on success
local function run(self)
    assert(ngx_get_phase() == "init_worker",
        "attempt to run ledge worker outside of the init_worker phase")
    local ledge = require("ledge")
    local ql_worker = assert(require("resty.qless.worker").new({
        get_redis_client = ledge.create_qless_connection,
        close_redis_client = ledge.close_redis_connection
    }))
    -- Runs around job execution, to instantiate necessary connections
    ql_worker.middleware = function(job)
        job.redis = ledge.create_redis_connection()
        co_yield() -- Perform the job
        ledge.close_redis_connection(job.redis)
    end
    -- Start a worker for each of the queues
    assert(ql_worker:start({
        interval = self.config.interval,
        concurrency = self.config.gc_queue_concurrency,
        reserver = "ordered",
        queues = { "ledge_gc" },
    }))
    assert(ql_worker:start({
        interval = self.config.interval,
        concurrency = self.config.purge_queue_concurrency,
        reserver = "ordered",
        queues = { "ledge_purge" },
    }))
    assert(ql_worker:start({
        -- `or 1` removed: the defaults merge in new() guarantees
        -- interval is always set, matching the two starts above
        interval = self.config.interval,
        concurrency = self.config.revalidate_queue_concurrency,
        reserver = "ordered",
        queues = { "ledge_revalidate" },
    }))
    return true
end
_M.run = run
return setmetatable(_M, fixed_field_metatable)
================================================
FILE: lib/ledge.lua
================================================
local setmetatable, require =
setmetatable, require
local ngx_get_phase = ngx.get_phase
local ngx_null = ngx.null
local tbl_insert = table.insert
local util = require("ledge.util")
local tbl_copy = util.table.copy
local tbl_copy_merge_defaults = util.table.copy_merge_defaults
local fixed_field_metatable = util.mt.fixed_field_metatable
local redis_connector = require("resty.redis.connector")
local _M = {
    _VERSION = "2.3.0",
    -- Origin modes, ordered so they can be compared with `<`
    ORIGIN_MODE_BYPASS = 1, -- Never go to the origin, serve from cache or 503
    ORIGIN_MODE_AVOID = 2, -- Avoid the origin, serve from cache where possible
    ORIGIN_MODE_NORMAL = 4, -- Assume the origin is happy, use at will
}
-- Module-level (singleton) connection config, replaced by configure()
local config = setmetatable({
    redis_connector_params = {
        connect_timeout = 500, -- (ms)
        read_timeout = 5000, -- (ms)
        keepalive_timeout = 60000, -- (ms)
        keepalive_poolsize = 30,
    },
    qless_db = 1, -- Redis DB index used for the qless job queues
}, fixed_field_metatable)
-- Merge user-supplied connection config over the module defaults.
-- May only be called during the 'init' phase.
local function configure(user_config)
    assert(ngx_get_phase() == "init",
        "attempt to call configure outside the 'init' phase")
    local merged = tbl_copy_merge_defaults(user_config, config)
    config = setmetatable(merged, fixed_field_metatable)
end
_M.configure = configure
-- Per-handler configuration defaults, merged with user config in
-- create_handler(). Fixed-field so unknown option names raise.
local handler_defaults = setmetatable({
    storage_driver = "ledge.storage.redis",
    storage_driver_config = {},
    origin_mode = _M.ORIGIN_MODE_NORMAL,
    -- Note that upstream timeout and keepalive config is shared with outbound
    -- ESI request, which are not necessarily configured to use this "upstream"
    upstream_connect_timeout = 1000, -- (ms)
    upstream_send_timeout = 2000, -- (ms)
    upstream_read_timeout = 10000, -- (ms)
    upstream_keepalive_timeout = 75000, -- (ms)
    upstream_keepalive_poolsize = 64,
    upstream_host = "",
    upstream_port = 80,
    upstream_use_ssl = false,
    upstream_ssl_server_name = "",
    upstream_ssl_verify = true,
    advertise_ledge = true,
    visible_hostname = util.get_hostname(),
    buffer_size = 2^16,
    keep_cache_for = 86400 * 30, -- (sec)
    minimum_old_entity_download_rate = 56,
    esi_enabled = false,
    esi_content_types = { "text/html" },
    esi_allow_surrogate_delegation = false,
    esi_recursion_limit = 10,
    esi_args_prefix = "esi_",
    esi_max_size = 1024 * 1024, -- (bytes)
    esi_custom_variables = {},
    esi_attempt_loopback = true,
    esi_vars_cookie_blacklist = {},
    esi_disable_third_party_includes = false,
    esi_third_party_includes_domain_whitelist = {},
    enable_collapsed_forwarding = false,
    collapsed_forwarding_window = 60 * 1000, -- (ms)
    gunzip_enabled = true,
    keyspace_scan_count = 10,
    cache_key_spec = {}, -- No default as we don't ever wish to merge it
    max_uri_args = 100,
}, fixed_field_metatable)
-- events are not fixed field to avoid runtime fatal errors from bad config
-- ledge.bind() and handler:bind() both check validity of event names however.
-- Each value is the list of globally-bound callbacks for that event.
local event_defaults = {
    after_cache_read = {},
    before_upstream_connect = {},
    before_upstream_request = {},
    after_upstream_request = {},
    before_vary_selection = {},
    before_save = {},
    before_save_revalidation_data = {},
    before_serve = {},
    before_esi_include_request = {},
}
-- Merge user-supplied handler config over the packaged defaults.
-- May only be called during the 'init' phase.
local function set_handler_defaults(user_config)
    assert(ngx_get_phase() == "init",
        "attempt to call set_handler_defaults outside the 'init' phase")
    local merged = tbl_copy_merge_defaults(user_config, handler_defaults)
    handler_defaults = setmetatable(merged, fixed_field_metatable)
end
_M.set_handler_defaults = set_handler_defaults
-- Registers a callback against a named event for all future handlers.
-- Raises on unknown event names; 'init' phase only.
local function bind(event, callback)
    assert(ngx_get_phase() == "init",
        "attempt to call bind outside the 'init' phase")
    local listeners = event_defaults[event]
    assert(listeners, "no such event: " .. tostring(event))
    tbl_insert(listeners, callback)
    return true
end
_M.bind = bind
-- Thin factory over ledge.worker; see worker.new() for config handling.
local function create_worker(config)
    return require("ledge.worker").new(config)
end
_M.create_worker = create_worker
-- Creates a request handler from merged config plus a copy of the
-- globally bound event listeners (copied so per-handler binds don't
-- mutate the shared defaults).
local function create_handler(config)
    local merged = tbl_copy_merge_defaults(config, handler_defaults)
    return require("ledge.handler").new(merged, tbl_copy(event_defaults))
end
_M.create_handler = create_handler
-- Creates a Redis connection using the module-level connector params.
local function create_redis_connection()
    local rc, err = redis_connector.new(config.redis_connector_params)
    if rc then
        return rc:connect()
    end
    return nil, err
end
_M.create_redis_connection = create_redis_connection
-- As create_redis_connection, but requesting the slave role from the
-- connector (e.g. for read-only work against a replica).
local function create_redis_slave_connection()
    local params = tbl_copy_merge_defaults(
        { role = "slave" },
        config.redis_connector_params
    )
    local rc, err = redis_connector.new(params)
    if rc then
        return rc:connect()
    end
    return nil, err
end
_M.create_redis_slave_connection = create_redis_slave_connection
-- Places a Redis connection back on the keepalive pool.
local function close_redis_connection(redis)
    -- Possible for this to be called before a connection was created;
    -- ensure we actually have a resty-redis instance to close
    if not next(redis) then
        return nil, "No redis connection to close"
    end
    local rc, err = redis_connector.new(config.redis_connector_params)
    if not rc then
        return nil, err
    end
    return rc:set_keepalive(redis)
end
_M.close_redis_connection = close_redis_connection
-- Creates a Redis connection switched to the configured qless DB.
local function create_qless_connection()
    local redis, err = create_redis_connection()
    if not redis then return nil, err end
    local selected, select_err = redis:select(config.qless_db)
    if not selected or selected == ngx_null then
        return nil, select_err
    end
    return redis
end
_M.create_qless_connection = create_qless_connection
-- Instantiates and connects a storage driver.
-- Each step is pcall-wrapped so a broken driver module surfaces as
-- (nil, err) rather than an exception.
--
-- @param string driver module name (defaults to handler_defaults.storage_driver)
-- @param table driver config (merged over the handler defaults)
-- @return connection, or nil and an error
local function create_storage_connection(driver_module, storage_driver_config)
    -- Take config by value, and merge with defaults
    storage_driver_config = tbl_copy_merge_defaults(
        storage_driver_config or {},
        handler_defaults.storage_driver_config
    )
    if not driver_module then
        driver_module = handler_defaults.storage_driver
    end
    local ok, module = pcall(require, driver_module)
    if not ok then return nil, module end
    local ok, driver, err = pcall(module.new)
    if not ok then return nil, driver end
    if not driver then return nil, err end
    local ok, conn, err = pcall(driver.connect, driver, storage_driver_config)
    if not ok then return nil, conn end
    if not conn then return nil, err end
    return conn, nil
end
_M.create_storage_connection = create_storage_connection
-- Closes a storage connection via the driver's own close().
local function close_storage_connection(storage)
    return storage:close()
end
_M.close_storage_connection = close_storage_connection
return setmetatable(_M, fixed_field_metatable)
================================================
FILE: migrations/1.26-1.27.lua
================================================
-- Migration script: Ledge v1.26 -> v1.27 Redis data structures.
-- Intended to be run under resty, with a lua-resty-redis-connector DSN
-- supplied as the first command line argument (see the bottom of this file).
local redis_connector = require("resty.redis.connector").new()
local math_floor = math.floor
local math_ceil = math.ceil -- NOTE(review): appears unused in this script; confirm before removing
-- FFI bindings: RAND_pseudo_bytes (OpenSSL) for random data and
-- ngx_hex_dump (nginx core) for hex encoding — presumably both are linked
-- into the resty binary this runs under; verify on the target build.
local ffi = require "ffi"
local ffi_cdef = ffi.cdef
local ffi_new = ffi.new
local ffi_string = ffi.string
local C = ffi.C
ffi_cdef[[
typedef unsigned char u_char;
u_char * ngx_hex_dump(u_char *dst, const u_char *src, size_t len);
int RAND_pseudo_bytes(u_char *buf, int num);
]]
-- Generate a random hex string of `len` characters (len is halved and
-- floored to a byte count, so odd lengths yield len - 1 characters).
-- Returns the hex string, or nil if random bytes could not be obtained.
local function random_hex(len)
    local byte_len = math_floor(len / 2)
    local bytes = ffi_new("uint8_t[?]", byte_len)

    -- BUGFIX: the previous code tested `bytes` after the call, but an FFI
    -- cdata buffer is never falsy, so the error branch was unreachable (and
    -- referenced undefined globals ngx_log/ngx_ERR). RAND_pseudo_bytes
    -- returns -1 when the method is not supported, so test that instead.
    if C.RAND_pseudo_bytes(bytes, byte_len) == -1 then
        ngx.log(ngx.ERR, "error getting random bytes via FFI")
        return nil
    end

    local hex = ffi_new("uint8_t[?]", byte_len * 2)
    C.ngx_hex_dump(hex, bytes, byte_len)
    return ffi_string(hex, byte_len * 2)
end
-- Delete a whole cache entry inside a MULTI/EXEC transaction: each entity
-- in `entities` (via delete_entity) plus the entry's key, memused and
-- entities-set keys. Returns the result of EXEC.
function delete(redis, cache_key, entities)
    redis:multi()

    -- The entities list is intact, so delete each of them too
    for _, entity in ipairs(entities) do
        delete_entity(redis, cache_key .. "::entities", entity)
    end

    redis:del(
        cache_key .. "::key",
        cache_key .. "::memused",
        cache_key .. "::entities"
    )

    return redis:exec()
end
-- Delete every Redis key belonging to a single entity (the entity key
-- itself plus its revalidation, header and body keys), then remove the
-- entity from its entities set.
function delete_entity(redis, set, entity)
    local suffixes = {
        "",
        ":reval_req_headers",
        ":reval_params",
        ":headers",
        ":body",
        ":body_esi",
    }

    local keys = {}
    for i, suffix in ipairs(suffixes) do
        keys[i] = entity .. suffix
    end
    redis:del(unpack(keys))

    -- Remove from the entities set
    redis:zrem(set, entity)
end
-- Delete every entity in `members` except the one that is currently live.
function delete_old_entities(redis, set, members, current_entity)
    for _, candidate in ipairs(members) do
        if candidate ~= current_entity then
            delete_entity(redis, set, candidate)
        end
    end
end
-- Scan the keyspace for v1.26 cache entries (identified by their "::key"
-- main key) and migrate each to the v1.27 layout. Recurses with the cursor
-- returned by SCAN until Redis reports cursor 0.
--
-- Side effects: increments the global counters keys_processed,
-- keys_deleted and keys_failed (initialised by the script entry point).
-- Returns true when the keyspace has been fully scanned, or nil and an
-- error message if SCAN itself fails.
function scan(cursor, redis)
    local res, err = redis:scan(
        cursor,
        "MATCH", "ledge:cache:*::key", -- We use the "main" key to single out a cache entry
        "COUNT", 100
    )
    -- BUGFIX: was `res == ngx_null`, but ngx_null is never localised in
    -- this script (always nil); compare against ngx.null directly.
    if not res or res == ngx.null then
        return nil, "SCAN error: " .. tostring(err)
    else
        for _, key in ipairs(res[2]) do
            -- Strip the "::key" suffix to find the cache key
            local cache_key = string.sub(key, 1, -(string.len("::key") + 1))
            local skip = false

            local entity = redis:get(cache_key .. "::key")
            -- BUGFIX: a missing value comes back as ngx.null (not as an
            -- error); normalise that to nil to prevent concatenation errors
            -- below. The old check (entity_err == nil) wrongly discarded the
            -- entity on every *successful* GET.
            if entity == ngx.null then entity = nil end

            local memused = redis:get(cache_key .. "::memused")
            local score = redis:zscore(cache_key .. "::entities", cache_key .. "::" .. (entity or ""))
            local entity_count = redis:zcard(cache_key .. "::entities")
            local entity_members = redis:zrange(cache_key .. "::entities", 0, -1)

            -- Index explicitly rather than with ipairs: `entity` may be nil,
            -- and ipairs would stop at the first hole, silently skipping the
            -- missing-data check for the remaining values.
            local required = { entity, memused, score, entity_count, entity_members }
            for i = 1, 5 do
                local val = required[i]
                if not val or val == ngx.null then
                    -- If we're missing something we need (likely evicted) -- delete this key
                    if delete(redis, cache_key, entity_members) then
                        keys_deleted = keys_deleted + 1
                    else
                        keys_failed = keys_failed + 1
                    end
                    skip = true
                    break -- BUGFIX: only delete (and count) this key once
                end
            end

            -- Watch the main key - if it gets created by real traffic from here on in then
            -- the transaction will simply fail.
            redis:watch(cache_key .. "::main")

            -- Find out if real traffic already created this cache entry
            local new_entity = redis:hget(cache_key .. "::main", "entity")
            if new_entity and new_entity ~= ngx.null then
                -- The old entities refs will still exist, so clean them up
                delete_old_entities(redis, cache_key .. "::entities", entity_members, new_entity)
                keys_processed = keys_processed + 1
                skip = true
            end

            if not skip then
                -- Start transaction
                redis:multi()

                -- Move main entity to main key
                redis:rename(cache_key .. "::" .. entity, cache_key .. "::main")

                -- Rename headers etc
                for _, k in ipairs({ "headers", "reval_req_headers", "reval_params" }) do
                    redis:rename(
                        cache_key .. "::" .. entity .. ":" .. k,
                        cache_key .. "::" .. k
                    )
                end

                -- Create a new entity id and rename the live entity to it
                local new_entity_id = random_hex(32)
                for _, k in ipairs({ "body", "body_esi" }) do
                    redis:rename(
                        cache_key .. "::" .. entity .. ":" .. k,
                        "ledge:entity:" .. new_entity_id .. ":" .. k
                    )
                end

                -- Add the entity to the entities set
                redis:zadd(cache_key .. "::entities", score, new_entity_id)

                -- Remove the old form
                redis:zrem(
                    cache_key .. "::entities",
                    cache_key .. "::" .. entity
                )

                -- Add the live entity pointer to the main hash, and delete the old pointer
                redis:hset(cache_key .. "::main", "entity", new_entity_id)
                redis:del(cache_key .. "::key")

                -- Add the memused to the main hash, and delete the old key
                redis:hset(cache_key .. "::main", "memused", memused)
                redis:del(cache_key .. "::memused")

                -- Delete entities scheduled for GC but will fail on new codebase
                delete_old_entities(redis, cache_key .. "::entities", entity_members, new_entity_id)

                -- EXEC returns ngx.null if the WATCHed key was touched
                local exec_res = redis:exec()
                if not exec_res or exec_res == ngx.null then
                    ngx.say("transaction failed")
                    -- Something went wrong, lets try and delete this cache entry
                    if delete(redis, cache_key, entity_members) then
                        keys_deleted = keys_deleted + 1
                    else
                        keys_failed = keys_failed + 1
                    end
                else
                    keys_processed = keys_processed + 1
                end
            end
        end
    end

    local next_cursor = tonumber(res[1])
    if next_cursor > 0 then
        -- If we have a valid cursor, recurse to move on.
        return scan(next_cursor, redis)
    end

    return true
end
-- Script entry point: expects a lua-resty-redis-connector DSN as the first
-- CLI argument, connects, and migrates the keyspace via scan().
local dsn = arg[1]
if not dsn then
    ngx.say("Please provide a Redis Connector DSN as the first argument, in the form: redis://[PASSWORD@]HOST:PORT/DB")
else
    local redis, err = redis_connector:connect{ url = dsn }
    if not redis then
        ngx.say("Could not connect to Redis with DSN: ", dsn, " - ", err)
        return
    end

    -- Global counters, updated by scan() as it walks the keyspace
    keys_processed = 0
    keys_deleted = 0
    keys_failed = 0

    ngx.say("Migrating Ledge data structure from v1.26 to v1.27\n")

    local res, err = scan(0, redis)
    if not res or res == ngx.null then
        -- BUGFIX: message typo, was "Faied to scan keyspace"
        ngx.say("Failed to scan keyspace: ", err)
    else
        ngx.say("> ", keys_processed .. " cache entries successfully updated")
        ngx.say("> ", keys_deleted .. " incomplete / broken cache entries cleaned up")
        ngx.say("> ", keys_failed .. " failures\n")
    end
end
================================================
FILE: t/01-unit/cache_key.t
================================================
# Unit tests for lib/ledge/cache_key.lua: root key generation, URI arg
# normalisation, vary spec handling and key chains. Cases live in __DATA__.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";    # make the shared LedgeEnv test helper loadable
use LedgeEnv;
# nginx http{} block shared by every test case below
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Root key is the same with nil ngx.var.args and empty string
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local ledge_cache_key = require("ledge.cache_key")
local key1 = ledge_cache_key.generate_root_key(nil, nil)
ngx.req.set_uri_args({})
local key2 = ledge_cache_key.generate_root_key(nil, nil)
assert(key1 == key2, "key1 should equal key2")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 2: Custom key spec
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local ledge_cache_key = require("ledge.cache_key")
local root_key = ledge_cache_key.generate_root_key(nil, nil)
assert(root_key == "ledge:cache:http:localhost:/t:a=1",
"root_key should be ledge:cache:http:localhost:/t:a=1")
local cache_key_spec = {
"scheme",
"host",
"port",
"uri",
"args",
}
local root_key = ledge_cache_key.generate_root_key(cache_key_spec, nil)
assert(root_key == "ledge:cache:http:localhost:1984:/t:a=1",
"root_key should be ledge:cache:http:localhost:1984:/t:a=1")
local cache_key_spec = {
"host",
"uri",
}
local root_key = ledge_cache_key.generate_root_key(cache_key_spec, nil)
assert(root_key == "ledge:cache:localhost:/t",
"root_key should be ledge:cache:localhost:/t")
local cache_key_spec = {
"host",
"uri",
function() return "hello" end,
}
local root_key = ledge_cache_key.generate_root_key(cache_key_spec, nil)
assert(root_key == "ledge:cache:localhost:/t:hello",
"root_key should be ledge:cache:localhost:/t:hello")
}
}
--- request
GET /t?a=1
--- no_error_log
[error]
=== TEST 3: Errors in cache key spec functions
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local ledge_cache_key = require("ledge.cache_key")
local cache_key_spec = {
"host",
"uri",
function() return 123 end,
}
local root_key = ledge_cache_key.generate_root_key(cache_key_spec, nil)
assert(root_key == "ledge:cache:localhost:/t",
"cache_key should be ledge:cache:localhost:/t")
local cache_key_spec = {
"host",
"uri",
function() return foo() end,
}
local root_key = ledge_cache_key.generate_root_key(cache_key_spec, nil)
assert(root_key == "ledge:cache:localhost:/t",
"cache_key should be ledge:cache:localhost:/t")
}
}
--- request
GET /t?a=2
--- error_log
functions supplied to cache_key_spec must return a string
error in function supplied to cache_key_spec
=== TEST 4: URI args are sorted (normalised)
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local ledge_cache_key = require("ledge.cache_key")
local root_key = ledge_cache_key.generate_root_key(nil, nil)
ngx.print(root_key)
}
}
--- request eval
[
"GET /t",
"GET /t?a=1",
"GET /t?aba=1&aab=2",
"GET /t?a=1&b=2&c=3",
"GET /t?b=2&a=1&c=3",
"GET /t?c=3&a=1&b=2",
"GET /t?c=3&b&a=1",
"GET /t?c=3&b=&a=1",
"GET /t?c=3&b=2&a=1&b=4",
]
--- response_body eval
[
"ledge:cache:http:localhost:/t:",
"ledge:cache:http:localhost:/t:a=1",
"ledge:cache:http:localhost:/t:aab=2&aba=1",
"ledge:cache:http:localhost:/t:a=1&b=2&c=3",
"ledge:cache:http:localhost:/t:a=1&b=2&c=3",
"ledge:cache:http:localhost:/t:a=1&b=2&c=3",
"ledge:cache:http:localhost:/t:a=1&b&c=3",
"ledge:cache:http:localhost:/t:a=1&b=&c=3",
"ledge:cache:http:localhost:/t:a=1&b=2&b=4&c=3",
]
--- no_error_log
[error]
=== TEST 5: Max URI args
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local ledge_cache_key = require("ledge.cache_key")
local root_key = ledge_cache_key.generate_root_key(nil, 2)
ngx.print(root_key)
}
}
--- request eval
[
"GET /t",
"GET /t?a=1",
"GET /t?b=2&a=1",
"GET /t?c=3&b=2&a=1",
]
--- response_body eval
[
"ledge:cache:http:localhost:/t:",
"ledge:cache:http:localhost:/t:a=1",
"ledge:cache:http:localhost:/t:a=1&b=2",
"ledge:cache:http:localhost:/t:b=2&c=3",
]
--- no_error_log
[error]
=== TEST 6: Wildcard purge URIs
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local ledge_cache_key = require("ledge.cache_key")
local root_key = ledge_cache_key.generate_root_key(nil, nil)
ngx.print(root_key)
}
}
--- request eval
[
"PURGE /t*",
"PURGE /t?*",
"PURGE /t?a=1*",
"PURGE /t?a=*",
]
--- response_body eval
[
"ledge:cache:http:localhost:/t*:*",
"ledge:cache:http:localhost:/t:*",
"ledge:cache:http:localhost:/t:a=1*",
"ledge:cache:http:localhost:/t:a=*",
]
--- no_error_log
[error]
=== TEST 7: Compare vary spec
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local vary_compare = require("ledge.cache_key").vary_compare
-- Compare vary specs
local changed = vary_compare({}, {})
assert(changed == true, "empty table == empty table")
local changed = vary_compare({}, nil)
assert(changed == true, "empty table == nil")
local changed = vary_compare(nil, {})
assert(changed == true, "nil == empty table")
local changed = vary_compare({"Foo"}, {"Foo"})
assert(changed == true, "table == table")
local changed = vary_compare({"Foo", "Bar"}, {"Foo", "Bar"})
assert(changed == true, "table == table (multi-values")
local changed = vary_compare({"Foo", "bar"}, {"foo", "Bar"})
--assert(changed == true, "table == table (case)")
local changed = vary_compare({"Foo"}, {})
assert(changed == false, "table ~= empty table")
local changed = vary_compare({}, {"Foo"})
assert(changed == false, "empty table ~= table")
local changed = vary_compare({"Foo"}, nil)
assert(changed == false, "table ~= nil")
local changed = vary_compare(nil, {"Foo"})
assert(changed == false, "nil ~= table")
local changed = vary_compare({"Foo"}, {})
assert(changed == false, "table ~= empty table")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 8: Generate vary key
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local function log(...)
ngx.log(ngx.DEBUG, ...)
end
local generate_vary_key = require("ledge.cache_key").generate_vary_key
local called_flag = false
local callback = function(vary_key)
assert(type(vary_key) == "table", "callback receives vary key_table")
called_flag = true
end
-- Set headers
ngx.req.set_header("Foo", "Bar")
ngx.req.set_header("X-Test", "value")
called_flag = false
-- Empty/nil spec
local vary_key = generate_vary_key(nil, nil, nil)
log(vary_key)
assert(vary_key == "", "Nil spec generates empty string")
local vary_key = generate_vary_key({}, nil, nil)
log(vary_key)
assert(vary_key == "", "Empty spec generates empty string")
local vary_key = generate_vary_key(nil, callback, nil)
log(vary_key)
assert(called_flag == true, "Callback is called with nil spec")
assert(vary_key == "", "Nil vary spec not modified with noop function")
called_flag = false
local vary_key = generate_vary_key({}, callback, nil)
log(vary_key)
assert(called_flag == true, "Callback is called with empty spec")
assert(vary_key == "", "Empty vary spec not modified with noop function")
called_flag = false
-- With spec
local vary_key = generate_vary_key({"Foo"}, callback, nil)
log(vary_key)
assert(called_flag == true, "Callback is called")
assert(vary_key == "foo:bar", "Vary spec not modified with noop function")
called_flag = false
local vary_key = generate_vary_key({"Foo", "X-Test"}, callback, nil)
log(vary_key)
assert(called_flag == true, "Callback is called - multivalue spec")
assert(string.find(vary_key, "foo:bar"), "Vary spec not modified with noop function - multivalue spec")
assert(string.find(vary_key, "x-test:value"), "Vary spec not modified with noop function - multivalue spec")
assert(string.len(vary_key) == string.len("x-test:value:foo:bar"), "Vary spec not modified with noop function - multivalue spec only contains required headers")
called_flag = false
ngx.req.set_header("Foo", {"Foo1", "Foo2"})
local vary_key = generate_vary_key({"Foo", "X-Test"}, callback, nil)
log(vary_key)
assert(called_flag == true, "Callback is called - multivalue header")
assert(string.find(vary_key, "foo:foo1,foo2"), "Vary spec - multivalue header")
assert(string.find(vary_key, "x-test:value"), "Vary spec - multivalue header")
assert(string.len(vary_key) == string.len("x-test:value:foo:foo1,foo2"), "Vary spec - multivalue header only contains required headers")
called_flag = false
ngx.req.set_header("Foo", "Bar")
-- Active callback
callback = function(vary_key)
vary_key["MyVal"] = "Arbitrary"
end
local vary_key = generate_vary_key(nil, callback, nil)
log(vary_key)
assert(vary_key == "myval:arbitrary", "Callback modifies key with nil spec")
local vary_key = generate_vary_key({}, callback, nil)
log(vary_key)
assert(vary_key == "myval:arbitrary", "Callback modifies key with empty spec")
local vary_key = generate_vary_key({"Foo"}, callback, nil)
log(vary_key)
assert(string.find(vary_key, "foo:bar"), "Callback appends key with spec")
assert(string.find(vary_key, "myval:arbitrary"), "Callback appends key with spec")
assert(string.len(vary_key) == string.len("myval:arbitrary:foo:bar"), "Callback appends key with spec only contains required headers")
local vary_key = generate_vary_key({"Foo", "X-Test"}, callback, nil)
log(vary_key)
assert(string.find(vary_key, "myval:arbitrary"), "Callback appends key with spec - multi values")
assert(string.find(vary_key, "foo:bar"), "Callback appends key with spec - multi values")
assert(string.find(vary_key, "x-test:value"), "Callback appends key with spec - multi values")
assert(string.len(vary_key) == string.len("myval:arbitrary:foo:bar:x-test:value"), "Callback appends key with spec - multi values only contains required headers")
callback = function(vary_key)
vary_key["Foo"] = "Arbitrary"
end
local vary_key = generate_vary_key({"Foo"}, callback, nil)
log(vary_key)
assert(vary_key == "foo:arbitrary", "Callback overrides key spec")
callback = function(vary_key)
vary_key["Foo"] = nil
end
local vary_key = generate_vary_key({"Foo"}, callback, nil)
log(vary_key)
assert(vary_key == "", "Callback removes from key spec")
callback = function(vary_key)
assert(vary_key["X-None"] == ngx.null, "Spec values with missing headers appear as null")
end
local vary_key = generate_vary_key({"X-None"}, callback, nil)
log(vary_key)
assert(vary_key == "", "Missing values do not appear in key")
local vary_key = generate_vary_key({"A", "B"}, nil, {["A"] = "123", ["B"] = "xyz"})
log(vary_key)
assert(string.find(vary_key, "a:123"), "Vary key from arbitrary headers")
assert(string.find(vary_key, "b:xyz"), "Vary key from arbitrary headers")
assert(string.len(vary_key) == string.len("a:123:b:xyz"), "Vary key from arbitrary headers only contains required headers")
local vary_key = generate_vary_key({"Foo", "B"}, nil, {["Foo"] = "123", ["B"] = "xyz"})
log(vary_key)
assert(string.find(vary_key, "foo:123"), "Vary key from arbitrary headers")
assert(string.find(vary_key, "b:xyz"), "Vary key from arbitrary headers")
assert(string.len(vary_key) == string.len("foo:123:b:xyz"), "Vary key from arbitrary headers only contains required headers")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 9: Read vary spec
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local redis, err = require("ledge").create_redis_connection()
if not redis then
error("redis borked: " .. tostring(err))
end
local read_vary_spec = require("ledge.cache_key").read_vary_spec
local root_key = "ledge:dummy:root:"
local vary_spec_key = root_key.."::vary"
local spec, err = read_vary_spec()
assert(spec == nil and err ~= nil, "Redis required to read spec")
local spec, err = read_vary_spec(redis)
assert(spec == nil and err ~= nil, "Root key required to read spec")
redis.smembers = function() return nil, "Redis Error" end
local spec, err = read_vary_spec(redis, root_key)
assert(spec == nil and err == "Redis Error", "Redis error returned")
redis.smembers = require("resty.redis").smembers
local exists = redis:exists(vary_spec_key)
local spec, err = read_vary_spec(redis, root_key)
assert(type(spec) == "table" and #spec == 0 and exists == 0, "Missing key returns empty table")
redis:sadd(vary_spec_key, "Foo")
redis:sadd(vary_spec_key, "Bar")
local spec, err = read_vary_spec(redis, root_key)
table.sort(spec)
assert(type(spec) == "table" and #spec == 2 and spec[2] == "Foo" and spec[1] == "Bar", "Spec returned")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 10: Key chain
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local key_chain = require("ledge.cache_key").key_chain
local root_key = "ledge:dummy:root:"
local vary_key = "foo:bar:test:value"
local vary_spec = {"Foo", "Test"}
local expected = {
main = "ledge:dummy:root:#foo:bar:test:value::main",
entities = "ledge:dummy:root:#foo:bar:test:value::entities",
headers = "ledge:dummy:root:#foo:bar:test:value::headers",
reval_params = "ledge:dummy:root:#foo:bar:test:value::reval_params",
reval_req_headers = "ledge:dummy:root:#foo:bar:test:value::reval_req_headers",
}
local extra = {
vary = "ledge:dummy:root:::vary",
repset = "ledge:dummy:root:::repset",
root = "ledge:dummy:root:",
full = "ledge:dummy:root:#foo:bar:test:value",
fetching_lock = "ledge:dummy:root:#foo:bar:test:value::fetching",
}
local chain, err = key_chain()
assert(chain == nil and err ~= nil, "Root key required")
local chain, err = key_chain(root_key)
assert(chain == nil and err ~= nil, "Vary key required")
local chain, err = key_chain(root_key, vary_key)
assert(chain == nil and err ~= nil, "Vary spec required")
local chain, err = key_chain(root_key, vary_key, vary_spec)
assert(type(chain) == "table", "key chain returned")
local i = 0
for k,v in pairs(chain) do
i = i +1
ngx.log(ngx.DEBUG, k, ": ", v, " == ", expected[k])
assert(expected[k] == v, k.." chain mismatch")
end
assert(i == 5, "5 iterable keys: "..i)
for k,v in pairs(expected) do
ngx.log(ngx.DEBUG, k,": ", v, " == ", chain[k])
assert(chain[k] == v, k.." expected mismatch")
end
for k,v in pairs(extra) do
ngx.log(ngx.DEBUG, k,": ", v, " == ", chain[k])
assert(chain[k] == v, k.." extra mismatch")
i = i +1
end
assert(i == 10, "10 total chain entries: "..i)
for i,v in ipairs(vary_spec) do
assert(chain.vary_spec[i] == v, " Vary spec mismatch")
end
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 11: Save key chain
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local key_chain = require("ledge.cache_key").key_chain
local save_key_chain = require("ledge.cache_key").save_key_chain
local root_key = "ledge:dummy:root:"
local vary_key = "foo:bar:test:value"
local vary_spec = {"Foo", "Test"}
local chain = key_chain(root_key, vary_key, vary_spec)
local ok, err = save_key_chain()
assert(ok == nil and err ~= nil, "Redis required")
local ok, err = save_key_chain(redis)
assert(ok == nil and err ~= nil, "Key chain required")
local ok, err = save_key_chain(redis, "foo")
assert(ok == nil and err ~= nil, "Key chain must be a table")
local ok, err = save_key_chain(redis, {})
assert(ok == nil and err ~= nil, "Key chain must not be empty")
local ok, err = save_key_chain(redis, chain)
assert(ok == nil and err ~= nil, "TTL required")
local ok, err = save_key_chain(redis, chain, "foo")
assert(ok == nil and err ~= nil, "TTL must be a number")
-- Create main key
redis:set(chain.main, "foobar")
local ok, err = save_key_chain(redis, chain, 3600)
assert(ok == true , "returns true")
assert(redis:exists(chain.vary) == 1, "Vary spec key created")
assert(redis:exists(chain.repset) == 1, "Repset created")
local vs = redis:smembers(chain.vary)
for _, v in pairs(vs) do
local match = false
for _, v2 in ipairs(vary_spec) do
if v2:lower() == v then
match = true
end
end
assert(match, "Vary spec saved: ")
end
local vs = redis:smembers(chain.repset)
for _, v in pairs(vs) do
assert(v == chain.full, "Full key added to repset")
end
assert(redis:ttl(chain.vary) == 3600, "Vary spec expiry set")
assert(redis:ttl(chain.repset) == 3600, "Repset expiry set")
local vary_spec = {"Baz", "Qux"}
local chain = key_chain(root_key, vary_key, vary_spec)
local ok, err = save_key_chain(redis, chain, 3600)
local vs = redis:smembers(chain.vary)
for i, v in pairs(vs) do
local match = false
for _, v2 in ipairs(vary_spec) do
if v2:lower() == v then
match = true
end
end
assert(match, "Vary spec overwritten")
end
redis:sadd(chain.repset, "dummy_value")
local ok, err = save_key_chain(redis, chain, 3600)
local vs = redis:smembers(chain.repset)
for _, v in pairs(vs) do
assert(v ~= "dummy_value", "Missing keys are removed from repset")
end
redis:del(chain.repset)
local chain = key_chain(root_key, vary_key, {})
local ok, err = save_key_chain(redis, chain, 3600)
assert(redis:exists(chain.vary ) == 0, "Empty spec removes vary key")
assert(redis:exists(chain.repset) == 1, "Empty spec still creates repset")
local chain = key_chain(root_key, vary_key, {"Foo", "Bar", "Foo", "bar"})
local ok, err = save_key_chain(redis, chain, 3600)
assert(redis:scard(chain.vary) == 2, "Deduplicate vary fields")
}
}
--- request
GET /t
--- no_error_log
[error]
================================================
FILE: t/01-unit/esi.t
================================================
# Unit tests for lib/ledge/esi.lua: token parsing, capability negotiation,
# content-type filtering and ESI arg handling. Cases live in __DATA__.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";    # make the shared LedgeEnv test helper loadable
use LedgeEnv;
# nginx http{} block shared by every test case below
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: split_esi_token
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local esi = assert(require("ledge.esi"),
"module should load without errors")
local capability, version = esi.split_esi_token("ESI/1.0")
assert(capability == "ESI" and version == 1.0,
"capability and version should be returned")
local ok, cap, ver = pcall(esi.split_esi_token)
assert(ok and not cap and not ver,
"split_esi_token without a token should safely return nil")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 2: esi_capabilities
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
assert(require("ledge.esi").esi_capabilities() == "ESI/1.0",
"capabilities should be ESI/1.0")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 3: choose_esi_processor
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
-- handler stub
local handler = {
response = {
header = {
["Surrogate-Control"] = [[content=ESI/1.0]],
}
}
}
local processor = require("ledge.esi").choose_esi_processor(handler)
assert(next(processor), "processor should be a table")
assert(type(processor.get_scan_filter) == "function",
"get_scan_filter should be a function")
assert(type(processor.get_process_filter) == "function",
"get_process_filter should be a function")
-- unknown processor
handler.response.header["Surrogate-Control"] = [[content=FOO/2.0]]
assert(not require("ledge.esi").choose_esi_processor(handler),
"processor should be nil")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 4: is_allowed_content_type
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local res = {
header = {
["Content-Type"] = "text/html",
}
}
local allowed_types = {
"text/html"
}
local is_allowed_content_type =
require("ledge.esi").is_allowed_content_type
assert(is_allowed_content_type(res, allowed_types),
"text/html is allowed")
res.header["Content-Type"] = "text/ht"
assert(not is_allowed_content_type(res, allowed_types),
"text/ht is not allowed")
res.header["Content-Type"] = "text/html_foo"
assert(not is_allowed_content_type(res, allowed_types),
"text/html_foo is not allowed")
res.header["Content-Type"] = "text/html; charset=utf-8"
assert(is_allowed_content_type(res, allowed_types),
"text/html; charset=utf-8 is allowed")
res.header["Content-Type"] = "text/json"
assert(not is_allowed_content_type(res, allowed_types),
"text/json is not allowed")
table.insert(allowed_types, "text/json")
assert(is_allowed_content_type(res, allowed_types),
"text/json is allowed")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 5: can_delegate_to_surrogate
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local can_delegate_to_surrogate =
require("ledge.esi").can_delegate_to_surrogate
assert(not can_delegate_to_surrogate(true, "ESI/1.0"),
"cannot delegate without capability")
ngx.req.set_header("Surrogate-Capability", "localhost=ESI/1.0")
assert(can_delegate_to_surrogate(true, "ESI/1.0"),
"can delegate with capability")
assert(not can_delegate_to_surrogate(true, "FOO/1.2"),
"cannnot delegate to non-supported capability")
assert(can_delegate_to_surrogate({ "127.0.0.1" }, "ESI/1.0" ),
"can delegate to loopback with capability")
assert(not can_delegate_to_surrogate({ "127.0.0.2" }, "ESI/1.0" ),
"cant delegate to non-loopback with capability")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 6: filter_esi_args
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
local filter_esi_args = require("ledge.esi").filter_esi_args
local args = ngx.req.get_uri_args()
assert(args.a == "1" and args.esi_foo == "bar bar" and args.b == "2",
"request args should be intact")
filter_esi_args(handler)
local args = ngx.req.get_uri_args()
assert(args.a == "1" and not args.esi_foo and args.b == "2",
"esi args should be removed")
assert(ngx.ctx.__ledge_esi_args.foo == "bar bar",
"esi args should have foo: bar bar")
assert(tostring(ngx.ctx.__ledge_esi_args) == "esi_foo=bar%20bar",
"esi_args as a string should be foo=bar%20bar")
}
}
--- request
GET /t?a=1&esi_foo=bar+bar&b=2
--- no_error_log
[error]
================================================
FILE: t/01-unit/events.t
================================================
# Unit tests for Ledge's event system: bind/emit on handler instances and
# module-level default binds. Cases live in __DATA__.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";    # make the shared LedgeEnv test helper loadable
use LedgeEnv;
# nginx http{} block shared by every test case below
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Bind and emit
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
local ok, err = handler:bind("non_event", function(arg) end)
assert(not ok and err == "no such event: non_event",
"err should be set")
local function say(arg) ngx.say(arg) end
local ok, err = handler:bind("after_cache_read", say)
assert(ok and not err, "bind should return positively")
local ok, err = pcall(handler.emit, handler, "non_event")
assert(not ok and err == "attempt to emit non existent event: non_event",
"emit should fail with non-event")
-- Bind and emit all events
handler:bind("before_upstream_request", say)
handler:bind("after_upstream_request", say)
handler:bind("before_save", say)
handler:bind("before_save_revalidation_data", say)
handler:bind("before_serve", say)
handler:bind("before_esi_include_request", say)
handler:bind("before_vary_selection", say)
handler:emit("after_cache_read", "after_cache_read")
handler:emit("before_upstream_request", "before_upstream_request")
handler:emit("after_upstream_request", "after_upstream_request")
handler:emit("before_save", "before_save")
handler:emit("before_save_revalidation_data", "before_save_revalidation_data")
handler:emit("before_serve", "before_serve")
handler:emit("before_esi_include_request", "before_esi_include_request")
handler:emit("before_vary_selection", "before_vary_selection")
}
}
--- request
GET /t
--- response_body
after_cache_read
before_upstream_request
after_upstream_request
before_save
before_save_revalidation_data
before_serve
before_esi_include_request
before_vary_selection
--- error_log
no such event: non_event
=== TEST 2: Bind multiple functions to an event
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
for i = 1, 3 do
handler:bind("after_cache_read", function()
ngx.say("function ", i)
end)
end
handler:emit("after_cache_read")
}
}
--- request
GET /t
--- response_body
function 1
function 2
function 3
--- no_error_log
[error]
=== TEST 3: Default binds
--- http_config eval
qq {
lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;";
init_by_lua_block {
if $LedgeEnv::test_coverage == 1 then
require("luacov.runner").init()
end
require("ledge").bind("after_cache_read", function(arg)
ngx.say("default 1: ", arg)
end)
require("ledge").bind("after_cache_read", function(arg)
ngx.say("default 2: ", arg)
end)
}
}
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local ledge = require("ledge")
local ok, err = pcall(ledge.bind, "after_cache_read", function(arg)
ngx.say(arg)
end)
assert(not ok and string.find(err, "attempt to call bind outside the 'init' phase"), err)
local handler = require("ledge").create_handler()
handler:bind("after_cache_read", function(arg)
ngx.say("instance 1: ", arg)
end)
handler:bind("after_cache_read", function(arg)
ngx.say("instance 2: ", arg)
end)
handler:emit("after_cache_read", "foo")
}
}
--- request
GET /t
--- response_body
default 1: foo
default 2: foo
instance 1: foo
instance 2: foo
--- no_error_log
[error]
================================================
FILE: t/01-unit/handler.t
================================================
# Unit tests for lib/ledge/handler.lua: module loading, config defaults and
# basic request handling. Cases live in __DATA__.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";    # make the shared LedgeEnv test helper loadable
use LedgeEnv;
# Expose the test nginx port to the Lua env (used by TEST 2 to check
# the default upstream_port)
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
-- For TEST 2
TEST_NGINX_PORT = $LedgeEnv::nginx_port
});
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Load module
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local handler, err = require("ledge").create_handler()
assert(handler,
"create_handler() should return postively, got: " .. tostring(err))
local ok, err = require("ledge.handler").new()
assert(not ok, "new with empty config should return negatively")
assert(err == "config table expected",
"err should be 'config table expected'")
local handler = require("ledge.handler")
local ok, err = pcall(function()
handler.foo = "bar"
end)
assert(not ok, "setting unknown field should error")
assert(string.find(err, "attempt to create new field foo"),
"err should be 'attempt to create new field foo'")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 2: Override config defaults
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local handler = assert(require("ledge").create_handler({
upstream_host = "example.com",
}), "create_handler should return positively")
assert(handler.config.upstream_host == "example.com",
"upstream_host should be example.com")
assert(handler.config.upstream_port == TEST_NGINX_PORT,
"upstream_port should default to " .. TEST_NGINX_PORT)
-- Change config
handler.config.upstream_port = 81
assert(handler.config.upstream_port == 81,
"upstream_port should be 81")
-- Unknown config field
local ok, err = pcall(function()
handler.config.foo = "bar"
end)
assert(not ok, "setting unknown config should error")
assert(string.find(err, "attempt to create new field foo"),
"err should be 'attempt to create new field foo'")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 3: Call run on simple request without errors
--- http_config eval: $::HttpConfig
--- config
location /t_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
assert(require("ledge").create_handler():run(),
"run should return positively")
}
}
location /t {
echo "OK";
}
--- request
GET /t_prx
--- response_body
OK
--- no_error_log
[error]
=== TEST 4: Bind / emit
--- http_config eval: $::HttpConfig
--- config
location /t_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
function add_header(res)
res.header["X-Foo"] = "bar"
end
-- Bind succeeds
local ok, err = assert(handler:bind("before_serve", add_header),
"bind should return positively")
-- Bad event name
local ok, err = handler:bind("foo", add_header)
assert(not ok, "bind should return negatively")
assert(err == "no such event: foo",
"err should be 'no such event: foo'")
-- Bad user event
handler:bind("before_serve", function(res) error("oops", 2) end)
handler:run()
}
}
location /t {
echo "OK";
}
--- request
GET /t_prx
--- response_body
OK
--- response_headers
X-Foo: bar
--- error_log
no such event: foo
error in user callback for 'before_serve': oops
=== TEST 5: visible hostname
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Defaults to the hostname of the server
local visible_hostname = string.lower(require("ledge").create_handler().config.visible_hostname)
local host = string.lower(ngx.var.hostname)
assert(visible_hostname == host,
"visible_hostname "..tostring(visible_hostname).." should be "..host)
-- Test overriding the visible_hostname
local host = "example.com"
local visible_hostname = string.lower(require("ledge").create_handler({ visible_hostname = host }).config.visible_hostname)
assert(visible_hostname == host,
"visible_hostname should be " .. host)
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 6: read from cache
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
-- Set redis and read the cache key
handler.redis = redis
handler:cache_key_chain()
-- Unset redis again
handler.redis = {}
local res, err = handler:read_from_cache()
assert(res == nil and err ~= nil,
"read_from_cache should error with no redis connections")
handler.redis = redis
handler.storage = require("ledge").create_storage_connection(
handler.config.storage_driver,
handler.config.storage_driver_config
)
local res, err = handler:read_from_cache()
assert(res and not err, "read_from_cache should return positively")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 7: Call run with bad redis details
--- http_config eval
qq{
resolver local=on;
lua_package_path "./lib/?.lua;;";
init_by_lua_block {
if $LedgeEnv::test_coverage == 1 then
require("luacov.runner").init()
end
require("ledge").configure({
redis_connector_params = {
url = "redis://redis:0/",
},
qless_db = 123,
})
require("ledge").set_handler_defaults({
upstream_host = "$LedgeEnv::nginx_host",
upstream_port = $LedgeEnv::nginx_port,
storage_driver_config = {
redis_connector_params = {
url = "redis://$LedgeEnv::redis_host:$LedgeEnv::redis_port/$LedgeEnv::redis_database"
}
},
})
require("ledge.state_machine").set_debug(true)
}
}
--- config
location /t {
lua_socket_log_errors Off;
content_by_lua_block {
local ok, err = require("ledge").create_handler():run()
assert(ok == nil and err ~= nil,
"run should return negatively with an error")
ngx.say("OK")
}
}
--- request
GET /t
--- response_body
OK
--- no_error_log
[error]
=== TEST 8: save to cache
--- http_config eval: $::HttpConfig
--- config
location /t_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
handler:cache_key_chain()
handler.redis = {}
local res, err = handler:save_to_cache()
assert(res == nil and err ~= nil,
"save_to_cache should error with no response")
local res, err = handler:fetch_from_origin()
assert(res == nil and err ~= nil,
"fetch_from_origin should error with no redis")
handler.redis = redis
handler.storage = require("ledge").create_storage_connection(
handler.config.storage_driver,
handler.config.storage_driver_config
)
local res, err = handler:fetch_from_origin()
assert(res and not err, "fetch_from_origin should return positively")
local res, err = handler:save_to_cache(res)
ngx.log(ngx.DEBUG, res, " ", err)
assert(res and not err, "save_to_cache should return positively")
ngx.say("OK")
}
}
location /t {
echo "origin";
}
--- request
GET /t_prx
--- no_error_log
[error]
--- response_body
OK
=== TEST 9: save to cache, no body
--- http_config eval: $::HttpConfig
--- config
location /t_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
local res, err = handler:save_to_cache()
assert(res == nil and err ~= nil,
"save_to_cache should error with no response")
handler.redis = require("ledge").create_redis_connection()
handler.storage = require("ledge").create_storage_connection(
handler.config.storage_driver,
handler.config.storage_driver_config
)
local res, err = handler:fetch_from_origin()
assert(res and not err, "fetch_from_origin should return positively")
res.has_body = false
local res, err = handler:save_to_cache(res)
ngx.log(ngx.DEBUG, res, " ", err)
assert(res and not err, "save_to_cache should return positively")
ngx.say("OK")
}
}
location /t {
echo "origin";
}
--- request
GET /t_prx
--- no_error_log
[error]
--- response_body
OK
================================================
FILE: t/01-unit/jobs.t
================================================
# t/01-unit/jobs.t -- unit tests for ledge's qless background jobs.
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Make LedgeEnv.pm (shared test scaffolding) loadable from the parent dir.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Declare a shared dict the tests use to pass state between requests
# (e.g. the primed entity_id in TEST 1).
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
lua_shared_dict ledge_test 1m;
});
no_long_string();
no_diff();
run_tests();
# Test blocks follow in the DATA section (Test::Nginx convention).
__DATA__
=== TEST 1: Collect entity
Prime cache then collect the entity
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^ /cache break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
redis:flushall() -- Previous tests create some odd keys
local collect_entity = require("ledge.jobs.collect_entity")
local handler = require("ledge").create_handler()
local entity_id = ngx.shared.ledge_test:get("entity_id")
ngx.log(ngx.DEBUG, "Collecting: ", entity_id)
local job = {
data = {
entity_id = entity_id,
storage_driver = handler.config.storage_driver,
storage_driver_config = handler.config.storage_driver_config,
}
}
local ok, err, msg = collect_entity.perform(job)
assert(err == nil, "collect_entity should not return an error")
local storage = require("ledge").create_storage_connection(
handler.config.storage_driver,
handler.config.storage_driver_config
)
local ok, err = storage:exists(entity_id)
assert(ok == false, "Entity should not exist")
-- Failure cases
job.data.storage_driver = "bad"
local ok, err, msg = collect_entity.perform(job)
ngx.log(ngx.DEBUG, msg)
assert(err == "job-error" and msg ~= nil, "collect_entity should return job-error")
job.data.storage_driver = handler.config.storage_driver
job.data.storage_driver_config = { bad_config = "here" }
local ok, err, msg = collect_entity.perform(job)
ngx.log(ngx.DEBUG, msg)
assert(err == "job-error" and msg ~= nil, "collect_entity should return job-error")
}
}
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_serve", function(res)
ngx.log(ngx.DEBUG, "primed entity: ", res.entity_id)
ngx.shared.ledge_test:set("entity_id", res.entity_id)
end)
handler:run()
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 1")
}
}
--- request eval
[
"GET /cache_prx",
"GET /t"
]
--- no_error_log
[error]
=== TEST 2: Revalidate
Prime, Purge, revalidate
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^ /cache2 break;
content_by_lua_block {
local revalidate = require("ledge.jobs.revalidate")
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local job = {
redis = redis,
data = {
key_chain = handler:cache_key_chain()
}
}
local ok, err, msg = revalidate.perform(job)
assert(err == nil, "revalidate should not return an error")
assert(ngx.shared.ledge_test:get("test2") == "Revalidate Request received",
"Revalidate request was not received!"
)
redis:del(job.data.key_chain.reval_req_headers)
local ok, err, msg = revalidate.perform(job)
assert(err == "job-error" and msg ~= nil, "revalidate should return an error")
redis:del(job.data.key_chain.reval_params)
local ok, err, msg = revalidate.perform(job)
assert(err == "job-error" and msg ~= nil, "revalidate should return an error")
}
}
location /cache2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:run()
}
}
location /cache2 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=10"
ngx.print("TEST 2")
if string.find(ngx.req.get_headers().user_agent, "revalidate", 1, true) then
ngx.shared.ledge_test:set("test2", "Revalidate Request received")
end
}
}
--- request eval
[
"GET /cache2_prx",
"PURGE /cache2_prx",
"GET /t"
]
--- no_error_log
[error]
=== TEST 3: Revalidate - inline params
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local revalidate = require("ledge.jobs.revalidate")
local job = {
data = {
reval_params = {
server_addr = ngx.var.server_addr,
server_port = ngx.var.server_port,
scheme = ngx.var.scheme,
uri = "/cache3",
connect_timeout = 1000,
send_timeout = 1000,
read_timeout = 1000,
keepalive_timeout = 60,
keepalive_poolsize = 10,
},
reval_headers = {
["X-Test"] = "test_header"
}
}
}
local ok, err, msg = revalidate.perform(job)
assert(err == nil, "revalidate should not return an error")
assert(ngx.shared.ledge_test:get("test3") == "test_header",
"Revalidate request was not received!"
)
local job = {
data = {
reval_params = {
server_addr = ngx.var.server_addr,
server_port = ngx.var.server_port,
scheme = ngx.var.scheme,
uri = "/cache_slow",
connect_timeout = 1000,
send_timeout = 100,
read_timeout = 100,
keepalive_timeout = 60,
keepalive_poolsize = 10,
},
reval_headers = {
["X-Test"] = "test_header"
}
}
}
local ok, err, msg = revalidate.perform(job)
assert(err == "job-error" and msg ~= nil, "revalidate should return an error")
local job = {
data = {
reval_params = {
server_addr = ngx.var.server_addr,
server_port = ngx.var.server_port+1,
scheme = ngx.var.scheme,
uri = "/cache3",
connect_timeout = 1000,
send_timeout = 1000,
read_timeout = 1000,
keepalive_timeout = 60,
keepalive_poolsize = 10,
},
reval_headers = {
["X-Test"] = "test_header"
}
}
}
local ok, err, msg = revalidate.perform(job)
ngx.log(ngx.DEBUG, msg)
assert(err == "job-error" and msg ~= nil, "revalidate should return an error")
local job = {
redis = {
hgetall = function(...) return ngx.null end
},
data = {
key_chain = {}
}
}
local ok, err, msg = revalidate.perform(job)
ngx.log(ngx.DEBUG, msg)
assert(err == "job-error" and msg ~= nil, "revalidate should return an error")
local job = {
redis = {
hgetall = function(...) return nil, "dummy error" end
},
data = {
key_chain = {}
}
}
local ok, err, msg = revalidate.perform(job)
ngx.log(ngx.DEBUG, msg)
assert(err == "job-error" and msg ~= nil, "revalidate should return an error")
}
}
location /cache3 {
content_by_lua_block {
ngx.shared.ledge_test:set("test3", ngx.req.get_headers()["X-Test"])
}
}
location /cache_slow {
content_by_lua_block{
ngx.sleep(1)
ngx.print("OK")
}
}
--- request
GET /t
--- error_code: 200
=== TEST 4: purge
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^ /cache break;
content_by_lua_block {
local purge_job = require("ledge.jobs.purge")
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local heartbeat_flag = false
local job = {
redis = redis,
data = {
repset = "*::repset",
keyspace_scan_count = 2,
purge_mode = "invalidate",
storage_driver = handler.config.storage_driver,
storage_driver_config = handler.config.storage_driver_config,
},
ttl = function() return 5 end,
heartbeat = function()
heartbeat_flag = true
return heartbeat_flag
end,
}
-- Failure cases
job.data.storage_driver = "bad"
local ok, err, msg = purge_job.perform(job)
ngx.log(ngx.DEBUG, msg)
assert(err == "redis-error" and msg ~= nil, "purge should return redis-error")
job.data.storage_driver = handler.config.storage_driver
job.data.storage_driver_config = { bad_config = "here" }
local ok, err, msg = purge_job.perform(job)
ngx.log(ngx.DEBUG, msg)
assert(err == "redis-error" and msg ~= nil, "purge should return redis-error")
-- Passing case
job.data.storage_driver_config = handler.config.storage_driver_config
local ok, err, msg = purge_job.perform(job)
assert(err == nil, "purge should not return an error")
assert(heartbeat_flag == true, "Purge should heartbeat")
-- Heartbeat failure
job.heartbeat = function() return false end
local ok, err, msg = purge_job.perform(job)
ngx.log(ngx.DEBUG, msg)
assert(err == "redis-error" and msg == "Failed to heartbeat job", "purge should return heartbeat error")
job.heartbeat = function() return true end
-- Missing redis driver
job.redis = nil
local ok, err, msg = purge_job.perform(job)
ngx.log(ngx.DEBUG, msg)
assert(err == "job-error" and msg ~= nil, "purge should return job-error")
}
}
location /cache4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler():run()
}
}
location /cache4 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 4")
}
}
--- request eval
[
"GET /cache4_prx","GET /cache4_prx?a=1","GET /cache4_prx?a=2","GET /cache4_prx?a=3","GET /cache4_prx?a=4","GET /cache4_prx?a=5",
"GET /t",
"GET /cache4_prx?a=3"
]
--- response_headers_like eval
["X-Cache: MISS from .*", "X-Cache: MISS from .*","X-Cache: MISS from .*","X-Cache: MISS from .*","X-Cache: MISS from .*","X-Cache: MISS from .*",
"",
"X-Cache: MISS from .*"]
--- no_error_log
[error]
================================================
FILE: t/01-unit/ledge.t
================================================
# t/01-unit/ledge.t -- unit tests for the top-level ledge module
# (configure, connection factories, close helpers).
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Make LedgeEnv.pm (shared test scaffolding) loadable from the parent dir.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Expose the qless DB number as a Lua global so TEST 9 can select() into it.
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
qless_db = $LedgeEnv::redis_qless_database
});
no_long_string();
no_diff();
run_tests();
# Test blocks follow in the DATA section (Test::Nginx convention).
__DATA__
=== TEST 1: Load module without errors.
--- http_config eval: $::HttpConfig
--- config
location /ledge_1 {
content_by_lua_block {
assert(require("ledge"), "module should load without errors")
}
}
--- request
GET /ledge_1
--- no_error_log
[error]
=== TEST 2: Module cannot be externally modified
--- http_config eval: $::HttpConfig
--- config
location /ledge_2 {
content_by_lua_block {
local ledge = require("ledge")
local ok, err = pcall(function()
ledge.foo = "bar"
end)
assert(string.find(err, "attempt to create new field foo"),
"error 'field foo does not exist' should be thrown")
}
}
--- request
GET /ledge_2
--- no_error_log
[error]
=== TEST 3: Non existent params cannot be set
--- http_config eval
qq {
lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;";
init_by_lua_block {
if $LedgeEnv::test_coverage == 1 then
require("luacov.runner").init()
end
local ok, err = pcall(require("ledge").configure, { foo = "bar" })
assert(string.find(err, "field foo does not exist"),
"error 'field foo does not exist' should be thrown")
}
}
--- config
location /ledge_3 {
echo "OK";
}
--- request
GET /ledge_3
--- no_error_log
[error]
=== TEST 4: Params cannot be set outside of init
--- http_config eval: $::HttpConfig
--- config
location /ledge_4 {
content_by_lua_block {
require("ledge").configure({ qless_db = 4 })
}
}
--- request
GET /ledge_4
--- error_code: 500
--- error_log
attempt to call configure outside the 'init' phase
=== TEST 5: Create redis connection
--- http_config eval: $::HttpConfig
--- config
location /ledge_5 {
content_by_lua_block {
local redis = assert(require("ledge").create_redis_connection(),
"create_redis_connection() should return positively")
assert(redis:set("ledge_5:cat", "dog"),
"redis:set() should return positively")
local val, err = redis:get("ledge_5:cat")
ngx.say(val)
assert(require("ledge").close_redis_connection(redis),
"close_redis_connection() should return positively")
}
}
--- request
GET /ledge_5
--- response_body
dog
--- no_error_log
[error]
=== TEST 6: Create bad redis connection
--- http_config eval
qq{
lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;";
init_by_lua_block {
if $LedgeEnv::test_coverage == 1 then
require("luacov.runner").init()
end
require("ledge").configure({
redis_connector_params = {
port = 0, -- bad port
},
})
}
}
--- config
location /ledge_6 {
content_by_lua_block {
assert(not require("ledge").create_redis_connection(),
"create_redis_connection() should return negatively")
}
}
--- request
GET /ledge_6
--- error_log eval: qr/connect\(\)( to 127.0.0.1:0)? failed/
=== TEST 7: Create storage connection
--- http_config eval: $::HttpConfig
--- config
location /ledge_7 {
content_by_lua_block {
local storage = assert(require("ledge").create_storage_connection(),
"create_storage_connection should return positively")
ngx.say(storage:exists("ledge_7:123456"))
assert(require("ledge").close_storage_connection(storage),
"close_storage_connection() should return positively")
}
}
--- request
GET /ledge_7
--- response_body
false
--- no_error_log
[error]
=== TEST 8: Create bad storage connection
--- http_config eval
qq{
lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;";
init_by_lua_block {
if $LedgeEnv::test_coverage == 1 then
require("luacov.runner").init()
end
require("ledge").set_handler_defaults({
storage_driver_config = {
redis_connector_params = {
port = 0,
},
}
})
}
}
--- config
location /ledge_8 {
content_by_lua_block {
assert(not require("ledge").create_storage_connection(),
"create_storage_connection() should return negatively")
}
}
--- request
GET /ledge_8
--- error_log eval: qr/connect\(\)( to 127.0.0.1:0)? failed/
=== TEST 9: Create qless connection
--- http_config eval: $::HttpConfig
--- config
location /ledge_9 {
content_by_lua_block {
local redis = assert(require("ledge").create_qless_connection(),
"create_qless_connection() should return positively")
assert(redis:set("ledge_9:cat", "dog"),
"redis:set() should return positively")
assert(require("ledge").close_redis_connection(redis),
"close_redis_connection() should return positively")
local redis = require("ledge").create_redis_connection()
assert(redis:select(qless_db), "select() should return positively")
local val, err = redis:get("ledge_9:cat")
ngx.say(val)
assert(require("ledge").close_redis_connection(redis),
"close_redis_connection() should return positively")
}
}
--- request
GET /ledge_9
--- response_body
dog
--- no_error_log
[error]
=== TEST 10: Bad redis-connector params are caught
--- http_config eval
qq{
lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;";
init_by_lua_block {
if $LedgeEnv::test_coverage == 1 then
require("luacov.runner").init()
end
require("ledge").configure({
redis_connector_params = {
bad_time = true
},
})
require("ledge").set_handler_defaults({
storage_driver_config = {
redis_connector_params = {
bad_time2 = true
},
}
})
}
}
--- config
location /ledge_10 {
content_by_lua_block {
local ok, err = require("ledge").create_redis_connection()
assert(ok == nil and err ~= nil,
"create_redis_connection() should return negatively with error")
local ok, err = require("ledge").create_storage_connection()
assert(ok == nil and err ~= nil,
"create_storage_connection() should return negatively with error")
local ok, err = require("ledge").create_qless_connection()
assert(ok == nil and err ~= nil,
"create_qless_connection() should return negatively with error")
local ok, err = require("ledge").create_redis_slave_connection()
assert(ok == nil and err ~= nil,
"create_redis_slave_connection() should return negatively with error")
-- Test broken redis-connector params are caught when closing redis somehow
local ok, err = require("ledge").close_redis_connection({dummy = true})
assert(ok == nil and err ~= nil,
"close_redis_connection() should return negatively with error")
-- Test trying to close a non-existent redis instance
local ok, err = require("ledge").close_redis_connection({})
assert(ok == nil and err ~= nil,
"close_redis_connection() should return negatively with error")
ngx.say("OK")
}
}
--- request
GET /ledge_10
--- error_code: 200
--- response_body
OK
=== TEST 11: Closing an empty redis instance
--- http_config eval: $::HttpConfig
--- config
location /ledge_11 {
content_by_lua_block {
local ok, err = require("ledge").close_redis_connection({})
assert(ok == nil,
"close_redis_connection() should return negatively")
ngx.say("OK")
}
}
--- request
GET /ledge_11
--- error_code: 200
--- response_body
OK
================================================
FILE: t/01-unit/processor_1_0.t
================================================
# t/01-unit/processor_1_0.t -- unit tests for the ESI 1.0 processor
# (esi_eval_var, vars-tag processing, escaping, include fetching).
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Make LedgeEnv.pm (shared test scaffolding) loadable from the parent dir.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Default shared nginx http{} config; no per-file extras needed here.
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
# Test blocks follow in the DATA section (Test::Nginx convention).
__DATA__
=== TEST 1: Load module
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local processor = assert(require("ledge.esi.processor_1_0"),
"module should load without errors")
local processor = processor.new(require("ledge").create_handler())
assert(processor, "processor_1_0.new should return positively")
ngx.say("OK")
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
--- response_body
OK
=== TEST 2: esi_eval_var - QUERY STRING
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local processor = require("ledge.esi.processor_1_0")
local tests = {
--{"var_name", "key", "default", "default_quoted" },
{"QUERY_STRING", nil, "default", "default_quoted" },
{"QUERY_STRING", nil, nil, "default_quoted" },
{"QUERY_STRING", "test_param", "default", "default_quoted" },
{"QUERY_STRING", "test_param", nil, "default_quoted" },
}
for _,test in ipairs(tests) do
ngx.say(processor.esi_eval_var(test))
end
}
}
--- request eval
[
"GET /t",
"GET /t?test_param=test",
"GET /t?other_param=test",
"GET /t?test_param=test&test_param=test2",
]
--- no_error_log
[error]
--- response_body eval
[
"default
default_quoted
default
default_quoted
",
"test_param=test
test_param=test
test
test
",
"other_param=test
other_param=test
default
default_quoted
",
"test_param=test&test_param=test2
test_param=test&test_param=test2
test, test2
test, test2
",
]
=== TEST 3: esi_eval_var - HTTP header
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local processor = require("ledge.esi.processor_1_0")
local tests = {
--{"var_name", "key", "default", "default_quoted" },
{"HTTP_X_TEST", nil, "default", "default_quoted" },
{"HTTP_X_TEST", nil, nil, "default_quoted" },
}
for _,test in ipairs(tests) do
ngx.say(processor.esi_eval_var(test))
end
}
}
--- request eval
[
"GET /t",
"GET /t",
]
--- more_headers eval
[
"X-Dummy: foo",
"X-TEST: test_val"
]
--- no_error_log
[error]
--- response_body eval
[
"default
default_quoted
",
"test_val
test_val
",
]
=== TEST 4: esi_eval_var - Duplicate HTTP header
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local processor = require("ledge.esi.processor_1_0")
local tests = {
--{"var_name", "key", "default", "default_quoted" },
{"HTTP_X_TEST", nil, "default", "default_quoted" },
{"HTTP_X_TEST", nil, nil, "default_quoted" },
}
for _,test in ipairs(tests) do
ngx.say(processor.esi_eval_var(test))
end
}
}
--- request eval
[
"GET /t",
"GET /t",
]
--- more_headers eval
[
"X-Dummy: foo",
"X-TEST: test_val
X-TEST: test_val2"
]
--- no_error_log
[error]
--- response_body eval
[
"default
default_quoted
",
"test_val, test_val2
test_val, test_val2
",
]
=== TEST 5: esi_eval_var - Cookie
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local processor = require("ledge.esi.processor_1_0")
local tests = {
--{"var_name", "key", "default", "default_quoted" },
{"HTTP_COOKIE", nil, "default", "default_quoted" },
{"HTTP_COOKIE", "test_cookie", "default", "default_quoted" },
{"HTTP_COOKIE", "test_cookie", nil, "default_quoted" },
}
for _,test in ipairs(tests) do
ngx.say(processor.esi_eval_var(test))
end
}
}
--- request eval
[
"GET /t",
"GET /t",
"GET /t",
]
--- more_headers eval
[
"",
"Cookie: none=here",
"Cookie: test_cookie=my_cookie"
]
--- no_error_log
[error]
--- response_body eval
[
"default
default
default_quoted
",
"none=here
default
default_quoted
",
"test_cookie=my_cookie
my_cookie
my_cookie
",
]
=== TEST 6: esi_eval_var - Accept-Lang
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local processor = require("ledge.esi.processor_1_0")
local tests = {
--{"var_name", "key", "default", "default_quoted" },
{"HTTP_ACCEPT_LANGUAGE", nil, "default", "default_quoted" },
{"HTTP_ACCEPT_LANGUAGE", "en", "default", "default_quoted" },
{"HTTP_ACCEPT_LANGUAGE", "de", nil, "default_quoted" },
}
for _,test in ipairs(tests) do
ngx.say(processor.esi_eval_var(test))
end
}
}
--- request eval
[
"GET /t",
"GET /t",
"GET /t",
"GET /t",
]
--- more_headers eval
[
"",
"Accept-Language: en-gb",
"Accept-Language: en-us, blah",
"Accept-Language: en-gb
Accept-Language: test"
]
--- no_error_log
[error]
--- response_body eval
[
"default
default
default_quoted
",
"en-gb
true
false
",
"en-us, blah
true
false
",
"en-gb, test
true
false
",
]
=== TEST 7: esi_eval_var - ESI_ARGS
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Fake ESI args
require("ledge.esi").filter_esi_args(
require("ledge").create_handler()
)
local processor = require("ledge.esi.processor_1_0")
local tests = {
--{"var_name", "key", "default", "default_quoted" },
{"ESI_ARGS", nil, "default", "default_quoted" },
{"ESI_ARGS", "var1", "default", "default_quoted" },
{"ESI_ARGS", "var2", nil, "default_quoted" },
}
local str_split = require("ledge.util").string.split
for _,test in ipairs(tests) do
-- The default encoded string has a non-deterministic ordering due
-- to being decoded and re-encoded. For test purposes, we explicitly
-- re-order.
local res = processor.esi_eval_var(test)
local args = str_split (res, "&")
if #args > 1 then
table.sort(args)
res = ""
for _, v in ipairs (args) do
res = res .. v .. "&"
end
end
ngx.say(res)
end
}
}
--- request eval
[
"GET /t",
"GET /t?esi_var1=test1&esi_var2=test2&foo=bar",
"GET /t?esi_var2=test2&foo=bar",
"GET /t?esi_var1=test1&esi_other_var=foo&foo=bar",
"GET /t?esi_var1=test1&esi_var1=test2&foo=bar",
]
--- no_error_log
[error]
--- response_body eval
[
"default
default
default_quoted
",
"esi_var1=test1&esi_var2=test2&
test1
test2
",
"esi_var2=test2
default
test2
",
"esi_other_var=foo&esi_var1=test1&
test1
default_quoted
",
"esi_var1=test1&esi_var1=test2&
test1,test2
default_quoted
",
]
=== TEST 8: esi_eval_var - custom vars
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
ngx.ctx.__ledge_esi_custom_variables = ngx.req.get_uri_args() or {}
if ngx.ctx.__ledge_esi_custom_variables["empty"] then
ngx.ctx.__ledge_esi_custom_variables = {}
else
ngx.ctx.__ledge_esi_custom_variables["deep"] = {["table"] = "value!"}
end
local processor = require("ledge.esi.processor_1_0")
local tests = {
--{"var_name", "key", "default", "default_quoted" },
{"var1", nil, "default", "default_quoted" },
{"var2", nil, nil, "default_quoted" },
{"var1", "subvar", nil, "default_quoted" },
{"deep", "table", "default", "default_quoted" },
}
for _,test in ipairs(tests) do
ngx.say(processor.esi_eval_var(test))
end
}
}
--- request eval
[
"GET /t",
"GET /t?var1=test1&var2=test2",
"GET /t?var2=test2",
"GET /t?empty=true",
]
--- no_error_log
[error]
--- response_body eval
[
"default
default_quoted
default_quoted
value!
",
"test1
test2
default_quoted
value!
",
"default
test2
default_quoted
value!
",
"default
default_quoted
default_quoted
default
",
]
=== TEST 9: esi_process_vars_tag
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
ngx.ctx.__ledge_esi_custom_variables = {
["DANGER_ZONE"] = '<esi:include src="/kenny" />'
}
local processor = require("ledge.esi.processor_1_0")
local tests = {
-- vars tags
{
["chunk"] = [[$(QUERY_STRING)]],
["res"] = [[test_param=test]],
["msg"] = "vars tag"
},
{
["chunk"] = [[before $(QUERY_STRING) after]],
["res"] = [[before test_param=test after]],
["msg"] = "vars tag - outside content"
},
{
["chunk"] = [[before $(QUERY_STRING) after]],
["res"] = [[before test_param=test after]],
["msg"] = "vars tag - inside content"
},
{
["chunk"] = [[ $(QUERY_STRING{test_param}) ]],
["res"] = [[ test ]],
["msg"] = "vars tag - whitespace"
},
{
["chunk"] = [[$(QUERY_STRING)
]],
["res"] = [[test_param=test
]],
["msg"] = "vars tag - html tags"
},
{
["chunk"] = [[<esi:vars></esi:vars>]],
["res"] = [[]],
["msg"] = "empty vars tags removed"
},
{
["chunk"] = [[foo
]],
["res"] = [[foo
]],
["msg"] = "empty vars tags removed - content preserved"
},
-- injecting ESI tags from vars
{
["chunk"] = [[$(DANGER_ZONE)]],
["res"] = [[&lt;esi:include src=&quot;/kenny&quot; /&gt;]],
["msg"] = "Injected tags are escaped"
},
}
for _, t in pairs(tests) do
local output = processor.esi_process_vars_tag(t["chunk"])
ngx.log(ngx.DEBUG, "'", output, "'")
assert(output == t["res"], "esi_process_vars_tag mismatch: "..t["msg"] )
end
ngx.say("OK")
}
}
location /kenny {
content_by_lua_block {
ngx.print("Shouldn't see this")
}
}
--- request
GET /t?test_param=test
--- no_error_log
[error]
--- response_body
OK
=== TEST 12: process_escaping
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local processor = require("ledge.esi.processor_1_0")
local tests = {
{
["chunk"] = [[Lorem ipsum dolor sit amet, consectetur adipiscing elit.]],
["res"] = [[Lorem ipsum dolor sit amet, consectetur adipiscing elit.]],
["msg"] = "nothing to escape"
},
{
["chunk"] = [[Lorem<!--esi ipsum dolor sit amet, -->consectetur adipiscing elit.]],
["res"] = [[Lorem ipsum dolor sit amet, consectetur adipiscing elit.]],
["msg"] = "no esi inside"
},
{
["chunk"] = [[Loremconsectetur adipiscing elit.]],
["res"] = [[Lorem $(QUERY_STRING)ipsum dolor sit amet, consectetur adipiscing elit.]],
["msg"] = "esi:vars inside"
},
}
for _, t in pairs(tests) do
local output = processor.process_escaping(t["chunk"])
ngx.log(ngx.DEBUG, "'", output, "'")
assert(output == t["res"], "process_escaping mismatch: "..t["msg"] )
end
ngx.say("OK")
}
}
--- request
GET /t?test_param=test
--- no_error_log
[error]
--- response_body
OK
=== TEST 13: fetch include
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Override the normal coroutine.yield function
local output
coroutine.yield = function(chunk) output = chunk end
local processor = require("ledge.esi.processor_1_0")
local handler = require("ledge").create_handler()
local self = {
handler = handler
}
local buffer_size = 64*1024
local tests = {
{
["tag"] = [[<esi:include src="/f" />]],
["res"] = [[fragment]],
["msg"] = "nothing to escape"
},
{
["tag"] = [[<esi:include src="/f?$(QUERY_STRING{test})" />]],
["res"] = [[fragmentfoobar]],
["msg"] = "Query string var is evaluated"
},
}
for _, t in pairs(tests) do
local ret = processor.esi_fetch_include(self, t["tag"], buffer_size)
ngx.log(ngx.DEBUG, "RET: '", ret, "'")
ngx.log(ngx.DEBUG, "OUTPUT: '", output, "'")
assert(output == t["res"], "esi_fetch_include mismatch: "..t["msg"] )
end
ngx.say("OK")
}
}
location /f {
content_by_lua_block {
ngx.print("fragment", ngx.var.args or "")
}
}
--- request
GET /t?test=foobar
--- no_error_log
[error]
--- response_body
OK
================================================
FILE: t/01-unit/purge.t
================================================
# Test::Nginx boilerplate: load the shared Ledge test environment
# (t/LedgeEnv.pm) and build the nginx http{} config used by every test below.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: create_purge_response
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
-- Unit-tests ledge.purge.create_purge_response(): JSON encoding of the
-- purge mode / result, optional qless job details, and the error path
-- for unserialisable input.
local cjson_decode = require("cjson").decode
local create_purge_response = assert(
require("ledge.purge").create_purge_response,
"module should load without errors"
)
local json, err = create_purge_response("invalidate", "purged")
local data = cjson_decode(json)
assert(not err, "err should be nil")
assert(data.purge_mode == "invalidate",
"purge mode should be invalidate")
assert(data.result == "purged",
"result should be purged")
assert(not data.qless_jobs, "qless_jobs should be nil")
-- Optional third argument carries the background job description.
local json, err = create_purge_response("revalidate", "scheduled", {
jid = "12345",
})
local data = cjson_decode(json)
assert(not err, "err should be nil")
assert(data.qless_jobs.jid == "12345",
"qless_job.jid should be '12345'")
-- Functions cannot be JSON-serialised; expect a descriptive error.
local json, err = create_purge_response(function() end)
assert(err == "Cannot serialise function: type not supported",
"error should be 'Cannot serialise function: type not supported'")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 2: expire keys
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^ /cache break;
content_by_lua_block {
-- Exercises ledge.purge.expire_keys() against the entry primed by the
-- preceding "GET /cache_prx" request (the rewrite maps /t to /cache so
-- the same cache key chain is computed here).
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local storage = require("ledge").create_storage_connection(
handler.config.storage_driver,
handler.config.storage_driver_config
)
handler.storage = storage
local key_chain = handler:cache_key_chain()
local entity_id = handler:entity_id(key_chain)
-- Capture the TTL before expiry so we can assert it was reduced.
local ttl, err = redis:ttl(key_chain.main)
local expire_keys = require("ledge.purge").expire_keys
local ok, err = expire_keys(redis, storage, key_chain, entity_id)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok, "expire_keys should return positively")
-- The "expires" hash field should now be in the past...
local expires, err = redis:hget(key_chain.main, "expires")
ngx.log(ngx.DEBUG,"expires: ", expires, " <= ", ngx.now())
assert(tonumber(expires) <= ngx.now(), "Key not expired")
-- ...and the Redis TTL shortened accordingly.
local new_ttl = redis:ttl(key_chain.main)
ngx.log(ngx.DEBUG, "ttl: ", tonumber(ttl), " > ", tonumber(new_ttl))
assert(tonumber(ttl) > tonumber(new_ttl), "TTL not reduced")
-- non-existent key
local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == false and err == nil, "return false with no error on missing key")
-- Stub out a partial main key, no ttl
redis:hset("bogus_key", "key", "value")
local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == nil and err ~= nil, "return nil with no ttl")
-- Set a TTL
redis:expire("bogus_key", 9000)
local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == nil and err ~= nil, "return nil with error on broken key")
-- String expires value
redis:hset("bogus_key", "expires", "now!")
local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == nil and err ~= nil, "return nil with error on string expires")
}
}
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
-- Primes the cache entry that /t operates on.
local handler = require("ledge").create_handler()
handler:bind("before_serve", function(res)
ngx.log(ngx.DEBUG, "primed entity: ", res.entity_id)
end)
handler:run()
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 2")
}
}
--- request eval
[
"GET /cache_prx",
"GET /t"
]
--- no_error_log
[error]
=== TEST 3: purge
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^ /cache3 break;
content_by_lua_block {
-- Drives ledge.purge.purge() through all three modes (invalidate /
-- revalidate / delete) against the entry primed by "GET /cache3_prx".
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local storage = require("ledge").create_storage_connection(
handler.config.storage_driver,
handler.config.storage_driver_config
)
handler.storage = storage
local key_chain = handler:cache_key_chain()
ngx.log(ngx.DEBUG, require("cjson").encode(key_chain))
local purge = require("ledge.purge").purge
-- invalidate - error
local ok, err = purge(handler, "invalidate", "bad_key")
--local ok, err = purge(handler, "invalidate", {main = "bogus_key3"})
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == false and err == "nothing to purge", "purge should return false - bad key")
-- invalidate
local ok, err = purge(handler, "invalidate", key_chain.repset)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == true and err == "purged", "purge should return true - purged")
-- revalidate: stub the background job scheduler to observe the call.
local reval_job = false
handler.revalidate_in_background = function()
reval_job = true
return "job"
end
local ok, err, job = purge(handler, "revalidate", key_chain.repset)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == false and err == "already expired", "purge should return false - already expired")
assert(reval_job == true, "revalidate should schedule job")
assert(job[1] == "job", "revalidate should return the job "..tostring(job))
-- delete, error: stub delete_from_cache to fail, then restore it.
handler.delete_from_cache = function() return nil, "delete error" end
local ok, err = purge(handler, "delete", key_chain.repset)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == nil and err == "delete error", "purge should return nil, error")
handler.delete_from_cache = require("ledge.handler").delete_from_cache
-- delete
local ok, err = purge(handler, "delete", key_chain.repset)
if err then ngx.log(ngx.DEBUG, "delete: ", err) end
assert(ok == true and err == "deleted", "purge should return true - deleted")
-- delete, missing
local ok, err = purge(handler, "delete", key_chain.repset)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == false and err == "nothing to purge", "purge should return false - nothing to purge")
-- Nothing under the root key should survive a delete purge.
local keys = redis:keys(key_chain.root.."*")
ngx.log(ngx.DEBUG, require("cjson").encode(keys))
assert(#keys == 0, "Keys have all been removed")
}
}
location /cache3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:run()
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 3")
}
}
--- request eval
[
"GET /cache3_prx",
"GET /t"
]
--- no_error_log
[error]
=== TEST 3b: purge with vary
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^ /cache3 break;
content_by_lua_block {
-- Like TEST 3, but the origin sends "Vary: X-Test" and two variants are
-- primed, so revalidate must schedule one job per representation.
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local storage = require("ledge").create_storage_connection(
handler.config.storage_driver,
handler.config.storage_driver_config
)
handler.storage = storage
local key_chain = handler:cache_key_chain()
ngx.log(ngx.DEBUG, require("cjson").encode(key_chain))
local purge = require("ledge.purge").purge
-- invalidate
local ok, err = purge(handler, "invalidate", key_chain.repset)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == true and err == "purged", "purge should return true - purged")
-- revalidate: stub the scheduler and count invocations.
local reval_job = false
local jobcount = 0
handler.revalidate_in_background = function()
jobcount = jobcount + 1
reval_job = true
return "job"..jobcount
end
local ok, err, job = purge(handler, "revalidate", key_chain.repset)
if err then ngx.log(ngx.DEBUG, err) end
assert(ok == false and err == "already expired", "purge should return false - already expired")
assert(reval_job == true, "revalidate should schedule job")
assert(job[1] == "job1" and job[2] == "job2", "revalidate should return the job "..tostring(job))
assert(jobcount == 2, "Revalidate should schedule 1 job per representation")
-- delete
local ok, err = purge(handler, "delete", key_chain.repset)
if err then ngx.log(ngx.DEBUG, "delete: ", err) end
assert(ok == true and err == "deleted", "purge should return true - deleted")
local keys = redis:keys(key_chain.root.."*")
ngx.log(ngx.DEBUG, require("cjson").encode(keys))
assert(#keys == 0, "Keys have all been removed")
}
}
location /cache3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:run()
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Vary"] = "X-Test"
ngx.say("TEST 3b")
}
}
--- request eval
[
"GET /cache3_prx", "GET /cache3_prx",
"GET /t"
]
--- more_headers eval
[
"X-Test: foo", "X-Test: bar",
""
]
--- no_error_log
[error]
=== TEST 4: purge api
--- http_config eval: $::HttpConfig
--- config
location /t {
rewrite ^ /cache4 break;
content_by_lua_block {
-- Exercises ledge.purge.purge_api() with a stubbed response object and a
-- stubbed ngx.req.get_body_data(), covering: nil body, invalid JSON, a
-- structurally bad request, a valid delete, and custom header passthrough.
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local storage = require("ledge").create_storage_connection(
handler.config.storage_driver,
handler.config.storage_driver_config
)
handler.storage = storage
-- Stub out response object
local response = {
status = 0,
body,
set_body = function(self, body)
self.body = body
end
}
handler.response = response
-- Stub the request body reader; each case below swaps json_body.
local json_body = nil
ngx.req.get_body_data = function()
return json_body
end
local purge_api = require("ledge.purge").purge_api
-- Nil body
local ok, err = purge_api(handler)
if response.body then ngx.log(ngx.DEBUG, response.body) end
assert(ok == false and response.body ~= nil, "nil body should return false")
response.body = nil
-- Invalid json
json_body = [[ foobar ]]
local ok, err = purge_api(handler)
if response.body then ngx.log(ngx.DEBUG, response.body) end
assert(ok == false and response.body ~= nil, "invalid json should return false")
response.body = nil
-- Valid json, bad request
json_body = [[{"foo": "bar"}]]
local ok, err = purge_api(handler)
if response.body then ngx.log(ngx.DEBUG, response.body) end
assert(ok == false and response.body ~= nil, "bad request should return false")
response.body = nil
-- Valid API request
json_body = require("cjson").encode({
uris = {
"http://"..ngx.var.host..":"..ngx.var.server_port.."/cache4_prx"
},
purge_mode = "delete",
headers = {
["X-Test"] = "Test Header"
}
})
local ok, err = purge_api(handler)
if response.body then ngx.log(ngx.DEBUG, response.body) end
assert(ok == true and response.body ~= nil, "valid request should return true")
response.body = nil
local res, err = redis:exists(handler:cache_key_chain().main)
if err then ngx.log(ngx.ERR, err) end
assert(res == 0, "Key should have been removed")
-- Custom headers should be added to request
json_body = require("cjson").encode({
uris = {
"http://"..ngx.var.host..":"..ngx.var.server_port.."/hdr_test"
},
purge_mode = "delete",
headers = {
["X-Test"] = "Test Header"
}
})
local ok, err = purge_api(handler)
if response.body then ngx.log(ngx.DEBUG, response.body) end
local match = response.body:find("X-Test: Test Header")
assert(ok == true and match ~= nil, "custom headers should pass through")
response.body = nil
}
}
location /cache4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
local handler = require("ledge").create_handler()
handler:run()
}
}
location /hdr_test {
content_by_lua_block {
-- Echo the received X-Test header back into the body so the purge API
-- response can be asserted against it.
ngx.print("X-Test: ", ngx.req.get_headers()["X-Test"])
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=4600"
ngx.say("TEST 4")
}
}
--- request eval
[
"GET /cache4_prx",
"GET /t"
]
--- no_error_log
[error]
================================================
FILE: t/01-unit/range.t
================================================
# Test::Nginx boilerplate: load the shared Ledge test environment and build
# the nginx http{} config used by every test below.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: req_byte_ranges
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Parses the Range request header via ledge.range.req_byte_ranges().
-- Each request below pairs a Range header (more_headers) with a case
-- number t identifying the expected parse result.
local req_byte_ranges = assert(require("ledge.range").req_byte_ranges,
"range module should load without errors")
local ranges = req_byte_ranges()
local t = tonumber(ngx.req.get_uri_args().t)
if t == 1 then
assert(not ranges,
"req_byte_ranges with no range header should return nil")
elseif t == 2 then
assert(ranges[1], "range should exist")
assert(ranges[1].from == 0 and ranges[1].to == 99,
"req_byte_ranges should be from 0 to 99")
elseif t == 3 then
assert(not ranges,
"req_byte_ranges with malformed range header should return nil")
elseif t == 4 then
assert(ranges[1], "range should exist")
assert(ranges[1].from == 0 and not ranges[1].to,
"req_byte_ranges should be from 0 to nil")
elseif t == 5 then
assert(ranges[1], "range should exist")
assert(not ranges[1].from and ranges[1].to == 99,
"req_byte_ranges should be from nil to 99")
elseif t == 6 then
assert(ranges[1], "range should exist")
assert(not ranges[1].from and not ranges[1].to,
"req_byte_ranges should be from nil to nil")
elseif t == 7 then
assert(ranges[1] and ranges[2] and not ranges[3],
"two ranges should exist")
assert(ranges[1].from == 0 and ranges[1].to == 10,
"ranges[1] should be from 0 to 10")
assert(ranges[2].from == 20 and ranges[2].to == 30,
"ranges[2] should be 20 to 30")
elseif t == 8 then
assert(ranges[1] and ranges[2] and not ranges[3],
"two ranges should exist")
assert(ranges[1].from == 0 and not ranges[1].to,
"ranges[1] should be from 0 to nil")
assert(not ranges[2].from and ranges[2].to == 30,
"ranges[2] should be nil to 30")
end
}
}
--- more_headers eval
[
"",
"Range: bytes=0-99",
"Range: 0-99",
"Range: bytes=0-",
"Range: bytes=-99",
"Range: bytes=-",
"Range: bytes=0-10,20-30",
"Range: bytes=0-,-30",
]
--- request eval
[
"GET /t?t=1",
"GET /t?t=2",
"GET /t?t=3",
"GET /t?t=4",
"GET /t?t=5",
"GET /t?t=6",
"GET /t?t=7",
"GET /t?t=8",
]
--- no_error_log
[error]
=== TEST 2: handle_range_request
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Tests range:handle_range_request() against a stubbed response whose
-- status/size come from the query string; the Range header per case is
-- supplied via more_headers.
local range = require("ledge.range").new()
local args = ngx.req.get_uri_args()
-- Response stub
local response = {
status = tonumber(args.status),
size = tonumber(args.size),
header = {}
}
local range_applied = false
response, range_applied = range:handle_range_request(response)
local t = tonumber(ngx.req.get_uri_args().t)
if t == 1 then
assert(response and not range_applied,
"response should not be nil but range was not applied")
elseif t == 2 then
assert(response and range_applied,
"response should not be nil and range should be applied")
assert(response.status == 206,
"status should be 206")
assert(response.header["Content-Range"] == "bytes 0-99/200",
"content_range header should be set")
elseif t == 3 then
assert(response and range_applied,
"response should not be nil and range should be applied")
assert(response.status == 206,
"status should be 206")
assert(response.header["Content-Range"] == "bytes 0-70/200",
"content_range header should be set to coalesced ranges")
elseif t == 4 then
assert(response and range_applied,
"response should not be nil and range should be applied")
assert(response.status == 206,
"status should be 206")
assert(response.header["Content-Range"] == "bytes 0-199/200",
"Content-Range header should be expanded to size")
elseif t == 5 then
assert(response and range_applied,
"response should not be nil and range should be applied")
assert(response.status == 206,
"status should be 206")
local ct = response.header["Content-Type"]
assert(string.find(ct, "multipart/byteranges;"),
"Content-Type header should include multipart/byteranges")
elseif t == 6 then
assert(response and not range_applied,
"response should not be nil but range was not applied")
assert(response.status == 416,
"status should be 416 (Not Satisfiable)")
elseif t == 7 then
assert(response and not range_applied,
"response should not be nil but range was not applied")
end
}
}
--- more_headers eval
[
"",
"Range: bytes=0-99",
"Range: bytes=0-30,20-70",
"Range: bytes=0-",
"Range: bytes=0-10,20-30",
"Range: bytes=40-20",
"Range: bytes=0-10",
]
--- request eval
[
"GET /t?t=1&size=100&status=200",
"GET /t?t=2&size=200&status=200",
"GET /t?t=3&size=200&status=200",
"GET /t?t=4&size=200&status=200",
"GET /t?t=5&size=200&status=200",
"GET /t?t=6&size=200&status=200",
"GET /t?t=7&size=200&status=404",
]
--- no_error_log
[error]
=== TEST 3: get_range_request_filter
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Tests the body filter returned by range:get_range_request_filter():
-- a stubbed 10-byte body ("0123456789") is streamed in two chunks and
-- the filtered output is compared per case.
local range = require("ledge.range").new()
local args = ngx.req.get_uri_args()
-- Response stub
local response = {
status = 200,
size = 10,
header = {},
body_reader = coroutine.wrap(function()
coroutine.yield("01234")
coroutine.yield("56789")
end),
}
if args["type"] then
response.header["Content-Type"] = args["type"]
end
-- Drain the (possibly filtered) body_reader into a single string.
local function read_body(response)
local res = ""
repeat
local chunk, err = response.body_reader()
if chunk then
res = res .. chunk
end
until not chunk
return res
end
local range_applied = false
response, range_applied = range:handle_range_request(response)
if range_applied then
response.body_reader = range:get_range_request_filter(
response.body_reader
)
end
local body = read_body(response)
local t = tonumber(ngx.req.get_uri_args().t)
if t == 1 then
assert(body == "0123456789", "body should be un-filtered")
elseif t == 2 then
assert(body == "0123", "body should be 0123")
elseif t == 3 then
assert(body == "2345678", "body should be 2345678")
elseif t == 4 then
assert(body == "456789", "body should be 456789")
elseif t == 5 then
assert(body == "23456789", "body should be 23456789")
elseif t == 6 then
assert(response.status == 206, "status should be 206")
local ct = response.header["Content-Type"]
assert(string.find(ct, "multipart/byteranges;"),
"Content-Type header should include multipart/byteranges")
assert(ngx.re.find(
body,
[[^(Content-Range: bytes 0-4\/10\n$)]],
"m"
), "body should contain Content-Range bytes 0-4/10")
assert(ngx.re.find(
body,
[[^Content-Range: bytes 6-9\/10\n$]],
"m"
), "body should contain Content-Range bytes 6-9/10")
elseif t == 7 then
assert(body == "3456789", "ranges should be coalesced")
elseif t == 8 then
assert(body == "0123456789", "body should be unfiltered")
assert(response.status == 206, response.status)
assert(response.header["Content-Range"] == "bytes 0-9/10",
"Content-Range header should be trimmed to size")
end
}
}
--- more_headers eval
[
"Range: bytes=0-9",
"Range: bytes=0-3",
"Range: bytes=2-8",
"Range: bytes=4-",
"Range: bytes=-8",
"Range: bytes=0-4,6-9",
"Range: bytes=0-4,6-9",
"Range: bytes=3-6,6-9",
"Range: bytes=0-11",
]
--- request eval
[
"GET /t?t=1",
"GET /t?t=2",
"GET /t?t=3",
"GET /t?t=4",
"GET /t?t=5",
"GET /t?t=6",
"GET /t?t=6&type=text/html",
"GET /t?t=7",
"GET /t?t=8",
]
--- no_error_log
[error]
=== TEST 4: parse_content_range
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Parses "Content-Range: bytes from-to/size" headers; a malformed unit
-- ("byte") must yield three nils.
local parse_content_range = require("ledge.range").parse_content_range
local from, to, size = parse_content_range("bytes 1-2/3")
assert(from == 1 and to == 2 and size == 3)
from, to, size = parse_content_range("byte 1-2/3")
assert(not from and not to and not size)
from, to, size = parse_content_range("bytes 123-1234/12345")
assert(from == 123 and to == 1234 and size == 12345)
}
}
--- request
GET /t
--- no_error_log
[error]
================================================
FILE: t/01-unit/request.t
================================================
# Test::Nginx boilerplate. The extra Lua config exposes the test server's
# host/port as Lua globals so tests can make loopback subrequests.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
TEST_NGINX_HOST = "$LedgeEnv::nginx_host"
TEST_NGINX_PORT = $LedgeEnv::nginx_port
});
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Purge mode
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- purge_mode() is driven by the X-Purge request header; the expected
-- mode for each case is passed in the query string. The final case has
-- no X-Purge header, so "invalidate" is evidently the default.
local req_purge_mode = assert(require("ledge.request").purge_mode,
"request module should load without errors")
local mode = ngx.req.get_uri_args()["p"]
assert(req_purge_mode() == mode,
"req_purge_mode should equal " .. mode)
}
}
--- more_headers eval
[
"X-Purge: delete",
"X-Purge: revalidate",
"X-Purge: invalidate",
""
]
--- request eval
[
"GET /t?p=delete",
"GET /t?p=revalidate",
"GET /t?p=invalidate",
"GET /t?p=invalidate"
]
--- no_error_log
[error]
=== TEST 2: relative_uri - spaces encoded
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Issue a raw loopback request whose path contains literal spaces
-- (Test::Nginx cannot send these directly); the target location then
-- asserts that relative_uri() percent-encodes them.
local http = require("resty.http").new()
http:connect(
TEST_NGINX_HOST, TEST_NGINX_PORT
)
local res, err = http:request({
path = "/t with spaces",
})
http:close()
}
}
location "/t with spaces" {
content_by_lua_block {
local req_relative_uri = require("ledge.request").relative_uri
assert(req_relative_uri() == "/t%20with%20spaces",
"uri should have spaces encoded")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 3: relative_uri - Percent encode encoded CRLF
http://resources.infosecinstitute.com/http-response-splitting-attack
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Guard against HTTP response splitting: a double-encoded CRLF in the
-- request path must come back consistently escaped (uppercased hex),
-- never decoded into a real CR/LF.
local http = require("resty.http").new()
http:connect(
TEST_NGINX_HOST, TEST_NGINX_PORT
)
local res, err = http:request({
path = "/t_crlf_encoded_%250d%250A",
})
http:close()
}
}
location /t_crlf_encoded_ {
content_by_lua_block {
local req_relative_uri = require("ledge.request").relative_uri
assert(req_relative_uri() == "/t_crlf_encoded_%250D%250A",
"encoded crlf in uri should be escaped")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 4: full_uri
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- full_uri() combines scheme, host and path for the current request.
local full_uri = require("ledge.request").full_uri
assert(full_uri() == "http://localhost/t",
"full_uri should be http://localhost/t")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 5: accepts_cache
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- accepts_cache() inspects request Cache-Control / Pragma headers; the
-- expected boolean for each case arrives in the ?c= query arg.
local accepts_cache = require("ledge.request").accepts_cache
assert(tostring(accepts_cache()) == ngx.req.get_uri_args().c,
"accepts_cache should be " .. ngx.req.get_uri_args().c)
}
}
--- more_headers eval
[
"Cache-Control: no-cache",
"Cache-Control: no-store",
"Pragma: no-cache",
"Cache-Control: no-cache, max-age=60",
"Cache-Control: s-maxage=20, no-cache",
"",
"Cache-Control: max-age=60",
"Cache-Control: max-age=0",
"Pragma: cache",
"Cache-Control: no-cachey",
]
--- request eval
[
"GET /t?c=false",
"GET /t?c=false",
"GET /t?c=false",
"GET /t?c=false",
"GET /t?c=false",
"GET /t?c=true",
"GET /t?c=true",
"GET /t?c=true",
"GET /t?c=true",
"GET /t?c=true"
]
--- no_error_log
[error]
================================================
FILE: t/01-unit/response.t
================================================
# Test::Nginx boilerplate. The extra Lua config defines a global read_body()
# helper shared by the tests below.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
-- Helper: drain a response object's body_reader and print each chunk.
function read_body(res)
repeat
local chunk, err = res.body_reader()
if chunk then
ngx.print(chunk)
end
until not chunk
end
});
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Load module
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- ledge.response.new() must reject missing/incomplete handler args and
-- succeed with a real handler; instances reject unknown field writes.
local res, err = require("ledge.response").new()
assert(not res, "new with empty args should return negatively")
assert(err ~= nil, "err not nil")
local res, err = require("ledge.response").new({})
assert(not res, "new with empty handler should return negatively")
assert(err ~= nil, "err not nil")
local res, err = require("ledge.response").new({redis = {} })
assert(not res, "new with empty handler redis should return negatively")
assert(err ~= nil, "err not nil")
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local res, err = require("ledge.response").new(handler)
assert(res and not err, "response object should be created without error")
-- The response object is field-locked: new fields raise an error.
local ok, err = pcall(function()
res.foo = "bar"
end)
assert(not ok, "setting unknown field should error")
assert(string.find(err, "attempt to create new field foo"),
"err should be 'attempt to create new field foo'")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 2: set_body
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- set_body() replaces the body_reader with one yielding the string.
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local res, err = require("ledge.response").new(handler)
read_body(res) -- will be empty
res:set_body("foo")
read_body(res) -- will print foo
}
}
--- request
GET /t
--- response_body: foo
--- no_error_log
[error]
=== TEST 3: filter_body_reader
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- filter_body_reader() chains named body filters; "foo" should pass
-- through cow (f->m) then sad (m->b) and be emitted as "boo". Passing a
-- non-function filter must raise an error.
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
require("ledge.response").set_debug(true)
local res, err = require("ledge.response").new(handler)
res:set_body("foo")
-- turns foo to moo
function get_cow_filter(reader)
return coroutine.wrap(function()
repeat
local chunk, err = reader()
if chunk then
coroutine.yield(ngx.re.gsub(chunk, "f", "m"))
end
until not chunk
end)
end
-- turns moo to boo
function get_sad_filter(reader)
return coroutine.wrap(function()
repeat
local chunk, err = reader()
if chunk then
coroutine.yield(ngx.re.gsub(chunk, "m", "b"))
end
until not chunk
end)
end
res:filter_body_reader("cow", get_cow_filter(res.body_reader))
res:filter_body_reader("sad", get_sad_filter(res.body_reader))
local ok, err = pcall(res.filter_body_reader, res, "bad", "foo")
assert(not ok and string.find(err, "filter must be a function"),
"error should contain 'filter must be a function'")
read_body(res)
}
}
--- request
GET /t
=== TEST 4: is_cacheable
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- is_cacheable() per response headers: private/no-store/no-cache and
-- "Vary: *" are uncacheable; field-scoped no-cache=X-Foo is cacheable.
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
require("ledge.response").set_debug(true)
local res, err = require("ledge.response").new(handler)
assert(not res:is_cacheable())
res.header = {
["Cache-Control"] = "max-age=60",
}
assert(res:is_cacheable())
res.header = {
["Cache-Control"] = "max-age=60",
["Pragma"] = "no-cache",
}
assert(not res:is_cacheable())
res.header = {
["Cache-Control"] = "s-maxage=60, private",
}
assert(not res:is_cacheable())
res.header = {
["Cache-Control"] = "max-age=60, no-store",
}
assert(not res:is_cacheable())
res.header = {
["Cache-Control"] = "max-age=60, no-cache",
}
assert(not res:is_cacheable())
res.header = {
["Cache-Control"] = "max-age=60, no-cache=X-Foo",
}
assert(res:is_cacheable())
res.header = {
["Cache-Control"] = "max-age=60",
["Vary"] = "*",
}
assert(not res:is_cacheable())
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 5: ttl
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- ttl() precedence: s-maxage > max-age > Expires > 0 (no headers).
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
require("ledge.response").set_debug(true)
local res, err = require("ledge.response").new(handler)
assert(res:ttl() == 0, "ttl should be 0")
res.header = {
["Expires"] = ngx.http_time(ngx.time() + 10)
}
assert(res:ttl() == 10, "Expires was 10 seconds in the future")
res.header["Cache-Control"] = "max-age=20"
assert(res:ttl() == 20, "max-age overrides to 20 seconds")
res.header["Cache-Control"] = "s-maxage=30"
assert(res:ttl() == 30, "s-maxage overrides to 30 seconds")
res.header["Cache-Control"] = "max-age=20, s-maxage=30"
assert(res:ttl() == 30, "s-maxage still overrides to 30 seconds")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 6: save / read / set_and_save
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Round-trips a response through Redis: save(), read() into a fresh
-- object, header persistence, atomic set_and_save(), and the error
-- path for bad set_and_save() params (expected in --- error_log).
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local res, err = require("ledge.response").new(handler)
res.uri = "http://example.com"
res.status = 200
local ok, err = res:save(60)
assert(ok and not err, "res should save without err")
local res2, err = require("ledge.response").new(handler)
local ok, err = res2:read()
assert(ok and not err, "res2 should read without err")
assert(res2.uri == "http://example.com", "res2 uri")
res2.header["X-Save-Me"] = "ok"
res2:save(60)
local res3, err = require("ledge.response").new(handler)
res3:read()
assert(res3.header["X-Save-Me"] == "ok", "res3 headers")
local ok, err = res3:set_and_save("size", 99)
assert(ok and not err, "set_and_save should return positively")
assert(res3.size == 99, "res3.size should be 99")
local res4, err = require("ledge.response").new(handler)
res4:read()
assert(res4.size == 99, "res4.size should be 99")
-- A nil field name is invalid and should surface the Redis hset error.
local ok, err = res4:set_and_save(nil, 2)
assert(not ok and err, "set_and_save should fail with bad params")
}
}
--- request
GET /t
--- error_log
set_and_save(): ERR wrong number of arguments for 'hset' command
=== TEST 7: read differentiates between redis failure and broken cache entry
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- read() should report "cache miss" (nil, no error) for broken/missing
-- entry keys, but a real error for Redis failures.
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local res, err = require("ledge.response").new(handler)
-- Ensure entry exists
res.uri = "http://example.com"
res.status = 200
res.size = 1
assert(res:save(60), "res should save without err")
-- Break entities
redis:del(handler:cache_key_chain().entities)
local ok, err = res:read()
assert(ok == nil and not err, "read should return no error with broken entities")
-- Break headers
redis:del( handler:cache_key_chain().headers)
local ok, err = res:read()
assert(ok == nil and not err, "read should return no error with broken headers")
-- Missing main key
redis:del( handler:cache_key_chain().main)
local ok, err = res:read()
assert(ok == nil and not err, "read should return no error with missing main key")
-- Break Redis instance
res.redis.hgetall = function() return ngx.null end
local ok, err = res:read()
-- NOTE(review): "ok or not err" contradicts the message below (it passes
-- when there is NO error); presumably this should assert (not ok and err)
-- -- confirm against ledge.response.read() before changing.
assert(ok or not err, "read should return error on redis error")
handler.cache_key_chain = function() return nil, "Dummy" end
local ok, err = res:read()
assert(ok == nil and err == "Dummy", "read should return error when failing to get the key chain")
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
=== TEST 8: save should replace the has_esi flag
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- A stale has_esi value set via set_and_save() must survive read(),
-- but a subsequent save() must reset it (to false) rather than persist it.
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local res, err = require("ledge.response").new(handler)
res.uri = "http://example.com"
res.status = 200
local ok, err = res:save(60)
assert(ok and not err, "res should save without err")
res:set_and_save("has_esi", "dummy")
local res2, err = require("ledge.response").new(handler)
local ok, err = res2:read()
assert(ok and not err, "res2 should save without err")
assert(res2.uri == "http://example.com", "res2 uri")
assert(res2.has_esi == "dummy", "res2 has_esi")
res2.header["X-Save-Me"] = "ok"
res2:save(60)
local res3, err = require("ledge.response").new(handler)
res3:read()
assert(res3.header["X-Save-Me"] == "ok", "res3 headers")
assert(res3.has_esi == false, "res3 has_esi: "..tostring(res3.has_esi))
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 9: Parse vary header
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- parse_vary_header() should normalise the Vary header into a sorted,
-- lower-cased, de-duplicated list of field names; a table header value
-- represents multiple Vary headers on one response.
local encode = require("cjson").encode
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local res, err = require("ledge.response").new(handler)
local tests = {
{
hdr = nil,
res = nil,
msg = "Nil header, nil spec",
},
{
hdr = "",
res = nil,
msg = "Empty header, nil spec",
},
{
hdr = "foo",
res = {"foo"},
msg = "Single field",
},
{
hdr = "Foo",
res = {"foo"},
msg = "Single field - case",
},
{
hdr = "fOo,bar,Baz",
res = {"bar","baz","foo"},
msg = "Multi field",
},
{
hdr = "fOo, bar , Baz",
res = {"bar","baz","foo"},
msg = "Multi field - whitespace",
},
{
hdr = "bar,baz,foo",
res = {"bar","baz","foo"},
msg = "Multi field - sort1",
},
{
hdr = "foo,baz,bar",
res = {"bar","baz","foo"},
msg = "Multi field - sort2",
},
{
hdr = "foo, bar, bar, foo, baz",
res = {"bar","baz","foo"},
msg = "De-duplicate",
},
{
hdr = {"foo", "Bar", "Baz, Qux"},
res = {"bar", "baz", "foo", "qux"},
msg = "Multiple vary headers",
},
{
hdr = {"foo, bar", "foo", "bar, Qux", "bar, Foo"},
res = {"bar", "foo", "qux"},
msg = "Multiple vary headers - deduplicate",
},
}
for _, t in ipairs(tests) do
res.header["Vary"] = t["hdr"]
local vary_spec = res:parse_vary_header()
ngx.log(ngx.DEBUG, "-----------------------------------------------")
ngx.log(ngx.DEBUG, "header: ", encode(t["hdr"]))
ngx.log(ngx.DEBUG, "spec: ", encode(vary_spec))
ngx.log(ngx.DEBUG, "expected: ", encode(t["res"]))
-- Compare element-wise for table expectations, directly otherwise.
if type(t["res"]) == "table" then
for i, v in ipairs(t["res"]) do
assert(vary_spec[i] == v, t["msg"])
end
else
assert(vary_spec == t["res"], t["msg"])
end
end
}
}
--- request
GET /t
--- no_error_log
[error]
================================================
FILE: t/01-unit/stale.t
================================================
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: can_serve_stale
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Unit test for ledge.stale.can_serve_stale: build a stub response
-- from query-string args (response Cache-Control and remaining TTL)
-- and compare the boolean result against the expected "stale" arg.
-- Request Cache-Control (max-stale) comes from --- more_headers,
-- paired positionally with --- request below.
local can_serve_stale = require("ledge.stale").can_serve_stale
local args = ngx.req.get_uri_args()
-- Stub response: only the fields can_serve_stale() inspects.
local res = {
header = {
["Cache-Control"] = args.rescc,
},
remaining_ttl = tonumber(args.ttl),
}
assert(tostring(can_serve_stale(res)) == ngx.req.get_uri_args().stale,
"can_serve_stale should be " .. ngx.req.get_uri_args().stale)
}
}
--- more_headers eval
[
"",
"Cache-Control: max-stale=60",
"Cache-Control: max-stale=60",
"Cache-Control: max-stale=60",
"Cache-Control: max-stale=9",
]
--- request eval
[
"GET /t?rescc=&ttl=0&stale=false",
"GET /t?rescc=&ttl=0&stale=true",
"GET /t?rescc=must-revalidate&ttl=0&stale=false",
"GET /t?rescc=proxy-revalidate&ttl=0&stale=false",
"GET /t?rescc=&ttl=-10&stale=false",
]
--- no_error_log
[error]
=== TEST 2: verify_stale_conditions
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Unit test for ledge.stale.verify_stale_conditions and the
-- token-specific wrappers (can_serve_stale_while_revalidate /
-- can_serve_stale_if_error), which should agree with the generic
-- check for their own token.
local verify_stale_conditions =
require("ledge.stale").verify_stale_conditions
local args = ngx.req.get_uri_args()
-- Stub response: the response's own Cache-Control and Age are
-- smuggled in via X-Res-* request headers so that each entry in
-- the --- more_headers list can vary them independently of the
-- request's real Cache-Control.
local res = {
header = {
["Cache-Control"] = ngx.req.get_headers().x_res_cache_control,
["Age"] = ngx.req.get_headers().x_res_age,
},
remaining_ttl = tonumber(args.ttl),
}
local token = ngx.req.get_uri_args().token
local stale = ngx.req.get_uri_args().stale
assert(tostring(verify_stale_conditions(res, token)) == stale,
"verify_stale_conditions should be " .. stale)
if token == "stale-while-revalidate" then
local can_serve_stale_while_revalidate =
require("ledge.stale").can_serve_stale_while_revalidate
assert(tostring(can_serve_stale_while_revalidate(res)) == stale,
"can_serve_stale_while_revalidate should be " .. stale)
elseif token == "stale-if-error" then
local can_serve_stale_if_error =
require("ledge.stale").can_serve_stale_if_error
assert(tostring(can_serve_stale_if_error(res)) == stale,
"can_serve_stale_if_error should be " .. stale)
end
}
}
--- more_headers eval
[
"",
"Cache-Control: stale-while-revalidate=60",
"X-Res-Cache-Control: stale-while-revalidate=60",
"Cache-Control: min-fresh=10
X-Res-Cache-Control: stale-while-revalidate=60",
"Cache-Control: max-age=10, stale-while-revalidate=60
X-Res-Age: 5",
"Cache-Control: max-age=4, stale-while-revalidate=60
X-Res-Age: 5",
"Cache-Control: max-stale=10, stale-while-revalidate=60",
"Cache-Control: max-stale=60, stale-while-revalidate=60",
]
--- request eval
[
"GET /t?token=stale-while-revalidate&stale=false",
"GET /t?token=stale-while-revalidate&stale=true",
"GET /t?token=stale-while-revalidate&stale=true",
"GET /t?token=stale-while-revalidate&stale=false",
"GET /t?token=stale-while-revalidate&stale=true",
"GET /t?token=stale-while-revalidate&stale=false",
"GET /t?token=stale-while-revalidate&stale=false",
"GET /t?token=stale-while-revalidate&stale=true",
]
--- no_error_log
[error]
================================================
FILE: t/01-unit/state_machine.t
================================================
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Load module
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Smoke test: every state machine module loads without error.
assert(require("ledge.state_machine"),
"state machine module should include without error")
assert(require("ledge.state_machine.events"),
"events module should include without error")
assert(require("ledge.state_machine.pre_transitions"),
"pre_transitions module should include without error")
assert(require("ledge.state_machine.states"),
"states module should include without error")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 2: Prove state machine compiles
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
-- Static sanity check of the state machine definition tables: every
-- state, previous-event filter, action and pre-transition referenced
-- must exist, every state must be reachable, and the set of events
-- fired from the source must exactly match the events table.
-- Any problem is reported via ngx.say, which breaks the expected
-- "OK" response body and fails the test.
local events = require("ledge.state_machine.events")
local pre_transitions = require("ledge.state_machine.pre_transitions")
local states = require("ledge.state_machine.states")
local actions = require("ledge.state_machine.actions")
for ev,t in pairs(events) do
for _,trans in ipairs(t) do
-- Check states referenced by transitions
for _,kw in ipairs { "when", "after", "begin" } do
if trans[kw] then
if "function" ~= type(states[trans[kw]]) then
ngx.say("State '", trans[kw], "' requested during ",
ev, " is not defined")
end
end
end
-- Check "in_case" previous event
if trans["in_case"] then
if not events[trans["in_case"]] then
ngx.say("Event '", trans["in_case"],
"' filtered for but not in transition table")
end
end
-- Check actions ("but_first" may be a single name or a list)
if trans["but_first"] then
local action = trans["but_first"]
if type(action) == "table" then
for _,ac in ipairs(action) do
if "function" ~= type(actions[ac]) then
ngx.say("Action '", ac, "' called during ", ev,
" is not defined")
end
end
else
if "function" ~= type(actions[action]) then
ngx.say("Action '", action, "' called during ", ev,
" is not defined")
end
end
end
end
end
-- Pre-transitions must reference defined states and actions
for t,v in pairs(pre_transitions) do
if "function" ~= type(states[t]) then
ngx.say("Pre-transitions defined for missing state '", t, "'")
end
if type(v) ~= "table" or #v == 0 then
ngx.say("No pre-transition actions defined for '", t, "'")
else
for _,action in ipairs(v) do
if "function" ~= type(actions[action]) then
ngx.say("Pre-transition action '", action,
"' is not defined")
end
end
end
end
-- Every defined state must be transitioned to at least once
for state, v in pairs(states) do
local found = false
for ev, t in pairs(events) do
for _, trans in ipairs(t) do
if trans["begin"] == state then
found = true
end
end
end
if found == false then
ngx.say("State '", state, "' is never transitioned to")
end
end
local states_file = "lib/ledge/state_machine/states.lua"
local handler_file = "lib/ledge/handler.lua"
-- Event names seen in self:e(event) calls, keyed by event name
local events_called = {}
for _, file in ipairs({ states_file, handler_file }) do
-- Check the file is readable first (and close the probe handle,
-- which was previously leaked)
local probe = assert(io.open(file, "r"),
"Could not find states.lua (are you running from the root dir?)")
probe:close()
-- Run luac to extract self:e(event) calls by event name
local cmd = "luac -p -l " .. file
cmd = cmd .. [[ | grep -A2 'SELF .* "e"' | awk '{print $7}']]
cmd = cmd .. [[ | grep "\".*\""]]
local f = assert(io.popen(cmd, "r"),
"io.popen should succeed for the luac pipeline")
-- For each call, check the event being triggered exists, and place the
repeat
local event = f:read('*l')
if event then
event = ngx.re.gsub(event, "\"", "") -- remove quotes
events_called[event] = true
if not events[event] then
ngx.say("Event '", event, "' is called but does not exist")
end
end
until not event
f:close()
end
-- Every event in the table must actually be fired somewhere
for event, _ in pairs(events) do
if not events_called[event] then
ngx.say("Event '", event, "' exists but is never called")
end
end
ngx.say("OK")
}
}
--- request
GET /t
--- response_body
OK
--- no_error_log
[error]
================================================
FILE: t/01-unit/storage.t
================================================
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
-- Define storage backends here, and add requests to each test
-- with backend= params.
-- Each backend carries a working "params" table and a deliberately
-- broken "bad_params" table used by the negative tests.
backends = {
redis = {
module = "ledge.storage.redis",
params = {
redis_connector_params = {
url = REDIS_URL,
},
},
bad_params = {
redis_connector_params = {
url = REDIS_URL,
foobar = "broken!"
},
}
},
redis_notransact = {
module = "ledge.storage.redis",
params = {
redis_connector_params = {
url = REDIS_URL,
connection_is_proxied = true,
},
supports_transactions = false,
},
bad_params = {
redis_connector_params = {
url = REDIS_URL,
foobar = "broken!"
},
supports_transactions = false,
}
},
}
-- Return the config for the named backend. For redis_notransact,
-- also poison the transactional resty.redis methods so any
-- accidental use of multi/exec/discard errors loudly.
function get_backend(backend)
local config = backends[backend]
if backend == "redis_notransact" then
-- stub out transactional redis functions to error
require("resty.redis").multi = function(...)
error("called transactional function 'multi'")
end
require("resty.redis").exec = function(...)
error("called transactional function 'exec'")
end
require("resty.redis").discard = function(...)
error("called transactional function 'discard'")
end
end
return config
end
-- Utility returning an iterator over given chunked data
-- (each item is a { chunk, err, has_esi } triple)
function get_source(data)
local index = 0
return function()
index = index + 1
if data[index] then
return data[index][1], data[index][2], data[index][3]
end
end
end
-- Utility returning an iterator over given chunked data, but which
-- fails (simulating storage connection failure) at fail_pos iteration.
function get_and_fail_source(data, fail_pos, storage)
local index = 0
return function()
index = index + 1
if index == fail_pos then
storage.redis:close()
end
if data[index] then
return data[index][1], data[index][2], data[index][3]
end
end
end
-- Utility returning an iterator over given chunked data, but which
-- fails (simulating upstream timeout) at fail_pos iteration.
function get_and_fail_upstream_source(data, fail_pos)
local index = 0
return function()
index = index + 1
if index == fail_pos then return nil, "timeout" end
if data[index] then
return data[index][1], data[index][2], data[index][3]
end
end
end
-- Utility to drain the body iterator as if serving it, echoing
-- each chunk as "chunk:err:has_esi" for response_body matching
function sink(iterator)
repeat
local chunk, err, has_esi = iterator()
if chunk then
ngx.say(chunk, ":", err, ":", tostring(has_esi))
end
until not chunk
end
-- Utility to report success and the size written
function success_handler(bytes_written)
ngx.say("wrote ", bytes_written, " bytes")
end
-- Utility to report the onfailure event was called
function failure_handler(reason)
ngx.say(reason)
end
-- Response object stub: just an entity_id plus a swappable body_reader
_res = {}
local _mt = { __index = _res }
function _res.new(entity_id)
return setmetatable({
entity_id = entity_id,
body_reader = function() return nil end,
}, _mt)
end
});
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Load connect and close without errors.
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
-- Smoke test: the backend selected via ?backend= can be
-- instantiated, connected and closed cleanly.
local config = get_backend(ngx.req.get_uri_args()["backend"])
local storage = require(config.module).new()
assert(storage:connect(config.params),
"storage:connect should return positively")
assert(storage:close(),
"storage:close() should return positively")
ngx.print(ngx.req.get_uri_args()["backend"], " OK")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"redis OK",
"redis_notransact OK",
]
--- no_error_log
[error]
=== TEST 2: Write entity, read it back
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00002-" .. backend)
res.body_reader = get_source({
{ "CHUNK 1", nil, false },
{ "CHUNK 2", nil, true },
{ "CHUNK 3", nil, false },
})
assert(not storage:exists(res.entity_id),
"entity should not exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 60,
success_handler,
failure_handler
)
sink(res.body_reader)
assert(storage:exists(res.entity_id),
"entity should exist")
-- Attach the reader, and run sink
res.body_reader = storage:get_reader(res)
sink(res.body_reader)
assert(storage:close(),
"storage:close should return positively")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"CHUNK 1:nil:false
CHUNK 2:nil:true
CHUNK 3:nil:false
wrote 21 bytes
CHUNK 1:nil:false
CHUNK 2:nil:true
CHUNK 3:nil:false
",
"CHUNK 1:nil:false
CHUNK 2:nil:true
CHUNK 3:nil:false
wrote 21 bytes
CHUNK 1:nil:false
CHUNK 2:nil:true
CHUNK 3:nil:false
",
]
--- no_error_log
[error]
=== TEST 3: Fail to write entity larger than max_size
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
config.params.max_size = 8
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00003-" .. backend)
res.body_reader = get_source({
{ "123", nil, false },
{ "456", nil, true },
{ "789", nil, false },
})
assert(not storage:exists(res.entity_id),
"entity should not exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 60,
success_handler,
failure_handler
)
sink(res.body_reader)
-- Prove entity wasn't written
assert(not storage:exists(res.entity_id),
"entity should not exist")
assert(storage:close(),
"storage:close should return positively")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"123:nil:false
456:nil:true
789:nil:false
body is larger than 8 bytes
",
"123:nil:false
456:nil:true
789:nil:false
body is larger than 8 bytes
",
]
--- no_error_log
[error]
=== TEST 4: Test zero length bodies are not written
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00004-" .. backend)
assert(not storage:exists(res.entity_id),
"entity should not exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 60,
success_handler,
failure_handler
)
sink(res.body_reader)
-- Prove entity wasn't written
assert(not storage:exists(res.entity_id),
"entity should not exist")
assert(storage:close(),
"storage:close should return positively")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"wrote 0 bytes
",
"wrote 0 bytes
",
]
--- no_error_log
[error]
=== TEST 5: Test write fails and abort handler called if conn to storage is interrupted
--- http_config eval: $::HttpConfig
--- config
location /storage {
lua_socket_log_errors off;
content_by_lua_block {
-- TODO find a way to check the error log, for no transact redis,
-- where the optimistic call to redis:del() fails due to closed conn.
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00005-" .. backend)
-- Load source but fail on second chunk
res.body_reader = get_and_fail_source({
{ "123", nil, false },
{ "456", nil, true },
{ "789", nil, true },
}, 2, storage)
assert(not storage:exists(res.entity_id),
"entity should not yet exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 60,
success_handler,
failure_handler
)
sink(res.body_reader)
if backend ~= "redis_notransact" then
-- Prove entity wasn't written (rolled back)
assert(not storage:exists(res.entity_id),
"entity should still not exist")
end
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"123:nil:false
456:nil:true
789:nil:true
error writing: closed
",
"123:nil:false
456:nil:true
789:nil:true
error writing: closed
",
]
=== TEST 5b: Test write fails and abort handler called if upstream errors
--- http_config eval: $::HttpConfig
--- config
location /storage {
lua_socket_log_errors off;
content_by_lua_block {
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00005b-" .. backend)
-- Load source but fail on second chunk
res.body_reader = get_and_fail_upstream_source({
{ "123", nil, false },
{ "456", nil, true },
{ "789", nil, true },
}, 2)
assert(not storage:exists(res.entity_id),
"entity should not yet exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 60,
success_handler,
failure_handler
)
sink(res.body_reader)
if backend ~= "redis_notransact" then
-- Prove entity wasn't written (rolled back)
assert(not storage:exists(res.entity_id),
"entity should still not exist")
end
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"123:nil:false
upstream error: timeout
",
"123:nil:false
upstream error: timeout
",
]
=== TEST 6: Write entity with short expiry, test keys expire
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00006-" .. backend)
res.body_reader = get_source({
{ "123", nil, false },
{ "456", nil, true },
{ "789", nil, false },
})
assert(not storage:exists(res.entity_id),
"entity should not exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 1,
success_handler,
failure_handler
)
sink(res.body_reader)
assert(storage:exists(res.entity_id),
"entity should exist")
ngx.sleep(2)
assert(not storage:exists(res.entity_id),
"entity should not exist")
assert(storage:close(),
"storage:close should return positively")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"123:nil:false
456:nil:true
789:nil:false
wrote 9 bytes
",
"123:nil:false
456:nil:true
789:nil:false
wrote 9 bytes
",
]
--- no_error_log
[error]
=== TEST 7: Test maxmem keys are cleaned up when transactions are not available
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
-- With transactions turned off, exceeding max_size must still leave
-- no stale entity behind: the writer cleans up its keys manually.
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
config.params.max_size = 8
-- Turn off atomicity
config.params.supports_transactions = false
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00007-" .. backend)
-- Source totals 9 bytes - one byte over max_size - so the write
-- must be rejected. (This source never fails; the previous stray
-- "storage" argument to get_source was a copy-paste leftover from
-- get_and_fail_source and has been removed.)
res.body_reader = get_source({
{ "123", nil, false },
{ "456", nil, true },
{ "789", nil, true },
})
assert(not storage:exists(res.entity_id),
"entity should not exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 60,
success_handler,
failure_handler
)
sink(res.body_reader)
-- Prove entity wasn't written (rolled back)
assert(not storage:exists(res.entity_id),
"entity should not exist")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"123:nil:false
456:nil:true
789:nil:true
body is larger than 8 bytes
",
"123:nil:false
456:nil:true
789:nil:true
body is larger than 8 bytes
",
]
--- no_error_log
[error]
=== TEST 8: Keys will remain on failure when transactions are not available
--- http_config eval: $::HttpConfig
--- config
location /storage {
lua_socket_log_errors off;
content_by_lua_block {
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
config.params.supports_transactions = false
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00008-" .. backend)
-- Load source but fail on second chunk
res.body_reader = get_and_fail_source({
{ "123", nil, false },
{ "456", nil, true },
{ "789", nil, true },
}, 2, storage)
assert(not storage:exists(res.entity_id),
"entity should not exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 60,
success_handler,
failure_handler
)
sink(res.body_reader)
-- Reconnect
assert(storage:connect(config.params),
"storage:connect should return positively")
-- Prove it still exists (could not be cleaned up)
assert(storage:exists(res.entity_id),
"entity should exist")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"123:nil:false
456:nil:true
789:nil:true
error writing: closed
",
"123:nil:false
456:nil:true
789:nil:true
error writing: closed
",
]
--- error_log eval
["closed"]
=== TEST 9: Close connection and then reconnect and re-read
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00009-" .. backend)
res.body_reader = get_source({
{ "123", nil, false },
{ "456", nil, true },
{ "789", nil, false },
})
assert(not storage:exists(res.entity_id),
"entity should not exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 60,
success_handler,
failure_handler
)
sink(res.body_reader)
assert(storage:exists(res.entity_id),
"entity should exist")
assert(storage:close(),
"storage:close should return positively")
assert(storage:connect(config.params),
"storage:connect should return positively")
assert(storage:exists(res.entity_id),
"entity should exist")
res.body_reader = storage:get_reader(res)
sink(res.body_reader)
assert(storage:close(),
"storage:close should return positively")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"123:nil:false
456:nil:true
789:nil:false
wrote 9 bytes
123:nil:false
456:nil:true
789:nil:false
",
"123:nil:false
456:nil:true
789:nil:false
wrote 9 bytes
123:nil:false
456:nil:true
789:nil:false
",
]
--- no_error_log
[error]
=== TEST 10: Entities can be deleted
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00010-" .. backend)
res.body_reader = get_source({
{ "123", nil, false },
{ "456", nil, false },
{ "789", nil, false },
})
assert(not storage:exists(res.entity_id),
"entity should not exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 99,
success_handler,
failure_handler
)
sink(res.body_reader)
assert(storage:exists(res.entity_id),
"entity should exist")
assert(storage:delete(res.entity_id),
"entity should delete without error")
assert(not storage:exists(res.entity_id),
"entity should not exist")
local ok, err = storage:delete("foo")
assert(ok == false and err == nil,
"deleting foo entity should return false without error")
assert(storage:close(),
"storage:close should return positively")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"123:nil:false
456:nil:false
789:nil:false
wrote 9 bytes
",
"123:nil:false
456:nil:false
789:nil:false
wrote 9 bytes
",
]
--- no_error_log
[error]
=== TEST 11: set_ttl / get_ttl
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
local backend = ngx.req.get_uri_args()["backend"]
local config = get_backend(backend)
local storage = require(config.module).new()
assert(storage:connect(config.params),
"storage:connect should return positively")
local res = _res.new("00011-" .. backend)
res.body_reader = get_source({
{ "123", nil, false },
{ "456", nil, false },
{ "789", nil, false },
})
assert(not storage:exists(res.entity_id),
"entity should not exist")
-- Attach the writer, and run sink
res.body_reader = storage:get_writer(
res, 99,
success_handler,
failure_handler
)
sink(res.body_reader)
assert(storage:exists(res.entity_id),
"entity should exist")
local ttl, err = storage:get_ttl("foo")
assert(ttl == false and err == "entity does not exist",
"getting ttl on foo entity should return false without error")
local ttl = storage:get_ttl(res.entity_id)
assert(ttl and ttl <= 99 and ttl >= 98,
"entity ttl should be roughly 99")
local ok, err = storage:set_ttl("foo", 1)
assert(ok == false and err == "entity does not exist",
"setting ttl on foo entity should return false without error")
assert(storage:set_ttl(res.entity_id, 1),
"setting ttl should return positively")
ngx.sleep(2)
assert(not storage:exists(res.entity_id),
"entity should have expired")
assert(storage:close(),
"storage:close should return positively")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"123:nil:false
456:nil:false
789:nil:false
wrote 9 bytes
",
"123:nil:false
456:nil:false
789:nil:false
wrote 9 bytes
",
]
--- no_error_log
[error]
=== TEST 12: Bad params should return an error
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
-- Connecting with deliberately broken connector params should fail
-- softly: a falsy result plus a descriptive error string (no throw).
local config = get_backend(ngx.req.get_uri_args()["backend"])
local storage = require(config.module).new()
local ok, err = storage:connect(config.bad_params)
assert(not ok,
"storage:connect should not return positively")
assert(type(err) == "string",
"storage:connect should return an error string")
ngx.log(ngx.INFO, err)
ngx.print(ngx.req.get_uri_args()["backend"], " OK")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"redis OK",
"redis_notransact OK",
]
--- no_error_log
[error]
=== TEST 13: Handler run with bad config should return an error
--- http_config eval: $::HttpConfig
--- config
location /storage {
content_by_lua_block {
-- A full handler run configured with broken storage params should
-- return nil plus an error rather than raising.
local config = get_backend(ngx.req.get_uri_args()["backend"])
local ok, err = require("ledge").create_handler({
storage_driver = config.module,
storage_driver_config = config.bad_params
}):run()
assert(ok == nil and err ~= nil,
"run should return negatively with an error")
ngx.print(ngx.req.get_uri_args()["backend"], " OK")
}
}
--- request eval
[
"GET /storage?backend=redis",
"GET /storage?backend=redis_notransact",
]
--- response_body eval
[
"redis OK",
"redis_notransact OK",
]
--- no_error_log
[error]
================================================
FILE: t/01-unit/tag_parser.t
================================================
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
-- Print the fields of a tag returned by parser:next(), padding a
-- missing closing table so the output line count stays stable.
function print_next(tag, before, after)
if not tag.closing then
tag.closing = {}
end
ngx.say(tag.closing.from)
ngx.say(tag.closing.to)
ngx.say(tag.closing.tag)
ngx.say(tag.whole)
ngx.say(tag.contents)
ngx.say(before)
ngx.say(after)
end
-- Collapse whitespace around newlines in multi-line test fixtures.
function strip_whitespace(content)
return ngx.re.gsub(content, [[\\s*\\n\\s*]], "")
end
-- Assert that regex matches content. ngx.re.find returns from, to
-- (the previous locals were named in the wrong order; both are only
-- nil-checked so behaviour is unchanged, but the names now match
-- the API).
function check_regex(regex, content, msg)
local from, to = ngx.re.find(content, regex, "soj")
assert(from ~= nil and to ~= nil, (msg or "regex should match"))
end
});
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Load module
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local tag_parser = assert(require("ledge.esi.tag_parser"),
"module should load without errors")
local parser = tag_parser.new("Content")
assert(parser, "tag_parser.new should return positively")
ngx.say("OK")
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
--- response_body
OK
=== TEST 2: Find next tag
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local tag_parser = assert(require("ledge.esi.tag_parser"),
"module should load without errors")
local parser = tag_parser.new("content-beforeinsidecontent-after")
assert(parser, "tag_parser.new should return positively")
local tag, before, after = parser:next("foo")
assert(tag, "next should find a tag")
print_next(tag, before, after)
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
--- response_body
26
31
inside
inside
content-before
content-after
=== TEST 3: Default next tag finds esi
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local tag_parser = assert(require("ledge.esi.tag_parser"),
"module should load without errors")
local parser = tag_parser.new("content-beforeinsidecontent-afterlast")
assert(parser, "tag_parser.new should return positively")
local tag, before, after = parser:next()
assert(tag, "next should find a tag")
print_next(tag, before, after)
ngx.say("##########")
local tag, before, after = parser:next()
assert(tag, "next should find a tag")
print_next(tag, before, after)
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
--- response_body
30
39
inside
inside
content-before
content-afterlast
##########
68
70
-->
comment
content-after
last
=== TEST 4: Find tag with attributes
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local tag_parser = assert(require("ledge.esi.tag_parser"),
"module should load without errors")
local parser = tag_parser.new("content-beforeinsidecontent-after")
assert(parser, "tag_parser.new should return positively")
local tag, before, after = parser:next("foo")
assert(tag, "next should find a tag")
print_next(tag, before, after)
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
--- response_body
54
59
inside
attr='value' attr2='value2'>inside
content-before
content-after
=== TEST 4b: Find nested tags
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local tag_parser = assert(require("ledge.esi.tag_parser"),
"module should load without errors")
local content = strip_whitespace([[
content-before
inside-foo
inside-bar
after-bar
inside-foo-2
content-after
]])
local parser = tag_parser.new(content)
assert(parser, "tag_parser.new should return positively")
local tag, before, after = parser:next("foo")
assert(tag, "next should find a tag")
print_next(tag, before, after)
ngx.say("#######")
local parser = tag_parser.new(content)
assert(parser, "tag_parser.new should return positively")
local tag, before, after = parser:next("bar")
assert(tag, "next should find a tag")
print_next(tag, before, after)
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
--- response_body
83
88
inside-fooinside-barafter-barinside-foo-2
inside-fooinside-barafter-barinside-foo-2
content-before
content-after
#######
45
50
inside-bar
inside-bar
content-beforeinside-foo
after-barinside-foo-2content-after
=== TEST 5: Pattern functions return valid regex
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local tag_parser = assert(require("ledge.esi.tag_parser"),
"module should load without errors")
local ok, err = ngx.re.find("", tag_parser.open_pattern("tag"))
assert(err == nil, "open_pattern should return a valid regex")
local ok, err = ngx.re.find("", tag_parser.close_pattern("tag"))
assert(err == nil, "open_pattern should return a valid regex")
local ok, err = ngx.re.find("", tag_parser.either_pattern("tag"))
assert(err == nil, "open_pattern should return a valid regex")
ngx.say("OK")
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
--- response_body
OK
=== TEST 5b: open pattern matches
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local tag_parser = assert(require("ledge.esi.tag_parser"),
"module should load without errors")
local regex = tag_parser.open_pattern("tag")
ngx.log(ngx.DEBUG, regex)
local checks = {
"start end", "simple tag",
"start end", "simple closed tag",
"start asdfsd end", "simple closed tag with content",
"start end", "simple tag whitespace",
"start end", "self-closing tag",
"start end", "self-closing tag whitespace",
"start end", "simple tag with attribute",
'start end', "simple tag with attribute (single-quote)",
'start end', "simple tag with attribute (numeric)",
'start end', "simple tag with attribute (special chars)",
}
for i=1,#checks,2 do
check_regex(regex, checks[i], "open_pattern should match "..checks[i+1])
end
ngx.say("OK")
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
--- response_body
OK
=== TEST 6: close pattern matches
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local tag_parser = assert(require("ledge.esi.tag_parser"),
"module should load without errors")
local regex = tag_parser.close_pattern("tag")
ngx.log(ngx.DEBUG, regex)
local checks = {
"start end", "simple tag",
"start end", "simple closed tag",
"start asdfsd end", "simple closed tag with content",
"start end", "simple tag with whitespace",
}
for i=1,#checks,2 do
check_regex(regex, checks[i], "close_pattern should match "..checks[i+1])
end
ngx.say("OK")
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
--- response_body
OK
=== TEST 7: either pattern matches
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local tag_parser = assert(require("ledge.esi.tag_parser"),
"module should load without errors")
local regex = tag_parser.either_pattern("tag")
ngx.log(ngx.DEBUG, regex)
local checks = {
"start end", "simple tag",
"start end", "simple closed tag",
"start asdfsd end", "simple closed tag with content",
"start end", "simple tag whitespace",
"start end", "self-closing tag",
"start end", "self-closing tag whitespace",
"start end", "simple tag with attribute",
'start end', "simple tag with attribute (single-quote)",
'start end', "simple tag with attribute (numeric)",
'start end', "simple tag with attribute (special chars)",
"start end", "simple tag",
"start end", "simple closed tag",
"start asdfsd end", "simple closed tag with content",
"start end", "simple tag with whitespace",
}
for i=1,#checks,2 do
check_regex(regex, checks[i], "either_pattern should match "..checks[i+1])
end
ngx.say("OK")
}
}
--- request
GET /t
--- error_code: 200
--- no_error_log
[error]
--- response_body
OK
================================================
FILE: t/01-unit/util.t
================================================
# Test scaffolding for t/01-unit/util.t (Test::Nginx data-driven tests).
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";    # make the shared LedgeEnv test helper findable
use LedgeEnv;
# Shared nginx http{} config for every test block below; also exposes the
# test nginx port to Lua as a global (read by TEST 10: append_server_port).
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
TEST_NGINX_PORT = $LedgeEnv::nginx_port
});
no_long_string();
no_diff();
run_tests();    # must run after $HttpConfig is defined
__DATA__
=== TEST 1: string.randomhex
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local randomhex = require("ledge.util").string.randomhex
-- lengths
assert(#randomhex(10) == 10, "randomhex(10) length should be 10")
assert(#randomhex(42) == 42, "randomhex(42) length should be 42")
-- apparent randomness
assert(randomhex(10) ~= randomhex(10),
"random hex strings should differ")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 2: mt.fixed_field_metatable
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
    -- TEST 2: fixed_field_metatable must reject both writes to unknown
    -- fields and reads of unknown fields.
    local fixed_field_metatable =
        require("ledge.util").mt.fixed_field_metatable

    -- Writing a new field must raise. Note: pcall takes only the function;
    -- the original passed a stray string argument which the closure
    -- ignored (copy-paste leftover, misleading — removed).
    local t = setmetatable({ a = 1, c = 3 }, fixed_field_metatable)
    local ok, err = pcall(function() t.b = 2 end)
    assert(string.find(err, "attempt to create new field b"),
        "err should contain 'attempt to create new field b'")

    -- Dereferencing a non-existent field must also raise.
    local t = setmetatable({ a = 1, c = 3 }, fixed_field_metatable)
    local ok, err = pcall(function() local a = t.b end)
    assert(string.find(err, "field b does not exist"),
        "err should contain 'field b does not exist'")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 3: mt.get_fixed_field_metatable_proxy
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local get_fixed_field_metatable_proxy =
require("ledge.util").mt.get_fixed_field_metatable_proxy
local defaults = { a = 1, b = 2, c = 3 }
-- Error if new field creation attempted
local t = setmetatable(
{ b = 4 },
get_fixed_field_metatable_proxy(defaults)
)
assert(t.a == 1, "t.a should be 1")
assert(t.b == 4, "t.b should be 4")
assert(t.c == 3, "t.c should be 3")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 4: mt.get_callable_fixed_field_metatable
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local get_callable_fixed_field_metatable =
require("ledge.util").mt.get_callable_fixed_field_metatable
local func =
function(t, field)
return t[field]
end
-- Error if new field creation attempted
local t = setmetatable(
{ a = 1, b = 2, c = 3 },
get_callable_fixed_field_metatable(func)
)
assert(t("a") == 1, "t('a') should return 1")
assert(t("b") == 2, "t('b') should return 2")
assert(t("c") == 3, "t('c') should return 3")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 5: table.copy
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local tbl_copy = require("ledge.util").table.copy
local mt = { __index = function(t, k) return "no index" end }
local t = {
a = 1,
b = 2,
c = {
x = 10,
y = 11,
z = setmetatable({ 1, 2, 3 }, mt),
}
}
local copy = tbl_copy(t)
-- Values copied
assert(t ~= copy, "copy should not equal t")
assert(copy.a == 1, "copy.a should be 1")
assert(type(copy.c) == "table", "copy.c should be a table")
assert(copy.c ~= t.c, "copy.c should not equal t.c")
assert(copy.c.x == 10, "copy.c.x should be 10")
assert(type(copy.c.z) == "table", "copy.c.z should be a table")
assert(copy.c.z ~= t.c.z, "copy.z.a. should not equal t.c.z")
assert(copy.c.z[1] == 1, "copy.c.z[1] should be 1")
assert(copy.c.z[3] == 3, "copy.c.z[3] should be 3")
-- Metatables copied
assert(getmetatable(copy) == nil, "getmetatable(copy) should be nil")
assert(getmetatable(copy.c.z) ~= getmetatable(t.c.z),
"copy.c.z metatable should not equal t.c.z metatable")
assert(getmetatable(copy.c.z).__index == getmetatable(t.c.z).__index,
"copy.c.z __index metamethod should equal t.c.z __index metamethod")
assert(copy.c.z[4] == "no index", "copy.c.z[3] should be 'no index'")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 6: table.copy_merge_defaults
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
    -- TEST 6: table.copy_merge_defaults merges a user table over a
    -- defaults table by value, recursing into child tables, and honours
    -- fixed-field defaults by rejecting unknown user fields.
    local tbl_copy_merge_defaults =
        require("ledge.util").table.copy_merge_defaults
    local fixed_field_metatable =
        require("ledge.util").mt.fixed_field_metatable

    local defaults = {
        a = 1,
        c = 3,
        d = {
            x = 10,
            z = 12,
        },
        e = {
            a = 1,
            c = 3,
        },
    }
    local t = {
        a = false,
        b = 2,
        e = {
            b = 2,
        },
    }
    local copy = tbl_copy_merge_defaults(t, defaults)

    -- Basic copy merge: user values (including false) win over defaults
    assert(copy ~= t, "copy should not equal t")
    assert(getmetatable(copy) == nil, "copy should not have a metatable")
    assert(copy.a == false, "copy.a should be false")
    assert(copy.b == 2, "copy.b should be 2")
    assert(copy.c == 3, "copy.c should be 3")

    -- Child table in defaults is merged
    assert(copy.d ~= defaults.d, "copy.d should not equal defaults d")
    assert(copy.d.x == 10, "copy.d.x should be 10")
    assert(copy.d.z == 12, "copy.d.z should be 12")

    -- Child table in both is merged
    assert(copy.e ~= defaults.e, "copy.e should not equal defaults e")
    assert(copy.e.a == 1, "copy.e.a should be 1")
    assert(copy.e.b == 2, "copy.e.b should be 2")
    assert(copy.e.c == 3, "copy.e.c should be 3")

    -- Same again, but with defaults being "fixed field"
    local defaults = setmetatable({
        a = 1,
        b = 2,
        c = 3,
        d = setmetatable({
            x = 10,
            y = 11,
            z = 12,
        }, fixed_field_metatable)
    }, fixed_field_metatable)
    local t_good = {
        b = 6,
        d = {
            z = 42,
        },
    }

    -- Copy is merged properly
    local copy = tbl_copy_merge_defaults(t_good, defaults)
    assert(copy.a == 1, "copy.a should be 1")
    assert(copy.b == 6, "copy.b should be 6")
    assert(copy.c == 3, "copy.c should be 3")
    assert(copy.d ~= defaults.d and copy.d ~= t_good.d,
        "copy.d should not equal defaults.d or t_good.d")
    assert(getmetatable(copy) == nil, "getmetatable(copy) should be nil")

    -- Copy merge should fail: top-level field unknown to fixed defaults
    local t_bad_1 = {
        a = 4,
        foo = "bar",
    }
    local ok, err = pcall(function()
        tbl_copy_merge_defaults(t_bad_1, defaults)
    end)
    assert(string.find(err, "field foo does not exist"),
        "error 'field foo does not exist' should be thrown")

    -- Copy merge should fail on inner table
    local t_bad_2 = {
        a = 4,
        d = {
            x = 10,
            foo = "bar",
        },
    }
    -- BUGFIX: this previously re-ran t_bad_1, so the inner-table failure
    -- path was never actually exercised.
    local ok, err = pcall(function()
        tbl_copy_merge_defaults(t_bad_2, defaults)
    end)
    assert(string.find(err, "field foo does not exist"),
        "error 'field foo does not exist' should be thrown")

    -- Copy merge with a nil user table gives us a copy of defaults
    local t, err = tbl_copy_merge_defaults(nil, defaults)
    assert(t ~= nil and t.a == 1,
        "merging a nil user table should still return defaults")
    assert(t ~= defaults, "defaults should be copied by value")

    -- Copy merge with nil defaults gives us a copy of the user table
    local t, err = tbl_copy_merge_defaults(t_good, nil)
    assert(t ~= nil and t.b == 6,
        "merging with nil defaults should still return user t")
    assert(t ~= t_good, "user t should be copied by value")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 7: string.split
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
    -- TEST 7: string.split splits on the full separator string, keeping
    -- empty/whitespace fields as distinct elements.
    local str_split = require("ledge.util").string.split

    local str1 = "comma, separated, string, "

    -- Splitting on "," keeps the leading space of each field and yields
    -- a trailing " " element from the final separator.
    local t = str_split(str1, ",")
    assert(#t == 4, "#t should be 4")
    assert(t[1] == "comma", "t[1] should be 'comma'")
    assert(t[2] == " separated", "t[2] should be ' separated'")
    assert(t[3] == " string", "t[3] should be ' string'")
    assert(t[4] == " ", "t[4] should be ' '")

    -- Splitting on ", " consumes the space, so no leading spaces and no
    -- trailing element. (Assertion messages fixed: they previously
    -- claimed expected values with leading spaces.)
    local t = str_split(str1, ", ")
    assert(#t == 3, "#t should be 3")
    assert(t[1] == "comma", "t[1] should be 'comma'")
    assert(t[2] == "separated", "t[2] should be 'separated'")
    assert(t[3] == "string", "t[3] should be 'string'")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 8: coroutine.wrap
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local co_wrap = require("ledge.util").coroutine.wrap
local co = co_wrap(
function()
for i = 1, 10 do
coroutine.yield(i)
end
end
)
function run()
local res = ""
repeat
local num = co()
if num then
res = res .. num .. "-"
end
until not num
res = res .. "finished"
return res
end
assert(run() == "1-2-3-4-5-6-7-8-9-10-finished",
"run() should return 1-2-3-4-5-6-7-8-9-10-finished")
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 8b: coroutine.wrap errors
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local co_wrap = require("ledge.util").coroutine.wrap
local co = co_wrap(
function()
for i = 1, 10 do
if i == 5 then
error("BOOM")
end
coroutine.yield(i)
end
end
)
function run()
local res = ""
repeat
local num, err = co()
if num then
res = res .. num .. "-"
elseif err then
ngx.log(ngx.DEBUG, "Coroutine error: ", err)
end
until not num
res = res .. "finished"
return res
end
assert(run() == "1-2-3-4-finished", "Error was yielded!")
}
}
--- request
GET /t
--- error_log
Coroutine error:
BOOM
--- no_error_log
Error was yielded!
=== TEST 9: get_hostname
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local get_hostname = require("ledge.util").get_hostname
assert(string.lower(get_hostname()) == string.lower(ngx.var.hostname),
"get_hostname "..tostring(get_hostname()).." should be "..ngx.var.hostname)
}
}
--- request
GET /t
--- no_error_log
[error]
=== TEST 10: append_server_port
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local append_server_port = require("ledge.util").append_server_port
local host = "example.com"
local default = host..":"..TEST_NGINX_PORT
assert(append_server_port(host) == default,
"append_server_port should be "..default)
}
}
--- request
GET /t
--- no_error_log
[error]
================================================
FILE: t/01-unit/validation.t
================================================
# Test scaffolding for t/01-unit/validation.t (Test::Nginx data-driven tests).
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";    # make the shared LedgeEnv test helper findable
use LedgeEnv;
# Default nginx http{} config, referenced by each test block via
# "--- http_config eval: $::HttpConfig".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();    # must run after $HttpConfig is defined
__DATA__
=== TEST 1: must_revalidate
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local must_revalidate = require("ledge.validation").must_revalidate
local res = {
header = {
["Cache-Control"] = ngx.req.get_headers().x_res_cache_control,
["Age"] = ngx.req.get_headers().x_res_age,
},
}
local result = ngx.req.get_uri_args().result
assert(tostring(must_revalidate(res)) == result,
"must_revalidate should be " .. result)
}
}
--- more_headers eval
[
"",
"Cache-Control: max-age=0",
"Cache-Control: max-age=1
X-Res-Age: 1",
"Cache-Control: max-age=1
X-Res-Age: 2",
"X-Res-Cache-Control: must-revalidate",
"X-Res-Cache-Control: proxy-revalidate",
]
--- request eval
[
"GET /t?&result=false",
"GET /t?&result=true",
"GET /t?&result=false",
"GET /t?&result=true",
"GET /t?&result=true",
"GET /t?&result=true",
]
--- no_error_log
[error]
=== TEST 2: can_revalidate_locally
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local can_revalidate_locally =
require("ledge.validation").can_revalidate_locally
local result = ngx.req.get_uri_args().result
assert(tostring(can_revalidate_locally()) == result,
"can_revalidate_locally should be " .. result)
}
}
--- more_headers eval
[
"",
"If-None-Match:" ,
"If-None-Match: foo",
"If-Modified-Since: Sun, 06 Nov 1994 08:49:37 GMT",
"If-Modified-Since:",
"If-Modified-Since: foo",
]
--- request eval
[
"GET /t?&result=false",
"GET /t?&result=false",
"GET /t?&result=true",
"GET /t?&result=true",
"GET /t?&result=false",
"GET /t?&result=false",
]
--- no_error_log
[error]
=== TEST 3: is_valid_locally
--- http_config eval: $::HttpConfig
--- config
location /t {
content_by_lua_block {
local is_valid_locally = require("ledge.validation").is_valid_locally
local res = {
header = {
["Last-Modified"] = ngx.req.get_headers().x_res_last_modified,
["Etag"] = ngx.req.get_headers().x_res_etag,
},
}
local result = ngx.req.get_uri_args().result
assert(tostring(is_valid_locally(res)) == result,
"is_valid_locally should be " .. result)
}
}
--- more_headers eval
[
"",
"If-Modified-Since: Sun, 05 Nov 1994 08:49:37 GMT
X-Res-Last-Modified: Sun, 06 Nov 1994 08:48:37 GMT",
"If-Modified-Since: Sun, 06 Nov 1994 08:49:37 GMT
X-Res-Last-Modified: Sun, 06 Nov 1994 08:48:37 GMT",
"If-Modified-Since: Sun, 06 Nov 1994 08:49:38 GMT
X-Res-Last-Modified: Sun, 06 Nov 1994 08:48:37 GMT",
"If-Modified-Since: Sun, 06 Nov 1994 08:49:36 GMT
X-Res-Last-Modified: Sun, 06 Nov 1994 08:49:37 GMT",
"If-None-Match: foo
X-Res-Etag: foo",
"If-None-Match: foo
X-Res-Etag: bar",
"If-None-Match: foo",
"X-Res-Etag: bar",
]
--- request eval
[
"GET /t?&result=false",
"GET /t?&result=false",
"GET /t?&result=true",
"GET /t?&result=true",
"GET /t?&result=false",
"GET /t?&result=true",
"GET /t?&result=false",
"GET /t?&result=false",
"GET /t?&result=false",
]
--- no_error_log
[error]
================================================
FILE: t/01-unit/worker.t
================================================
# Test scaffolding for t/01-unit/worker.t (Test::Nginx data-driven tests).
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";    # make the shared LedgeEnv test helper findable
use LedgeEnv;
# Default nginx http{} config used by most test blocks.
our $HttpConfig = LedgeEnv::http_config();
# TEST 6 needs the background worker running (run_worker => 1) plus a stub
# qless job module preloaded into package.loaded; the job bumps the global
# counter "foo" so the test can observe that the pushed job actually ran.
our $HttpConfig_Test6 = LedgeEnv::http_config(extra_lua_config => qq{
foo = 1
package.loaded["ledge.job.test"] = {
perform = function(job)
foo = foo + 1
return true
end
}
}, run_worker => 1);
no_long_string();
no_diff();
run_tests();    # must run after the config variables are defined
__DATA__
=== TEST 1: Load module without errors.
--- http_config eval: $::HttpConfig
--- config
location /worker_1 {
echo "OK";
}
--- request
GET /worker_1
--- no_error_log
[error]
=== TEST 2: Create worker with default config
--- http_config eval: $::HttpConfig
--- config
location /worker_2 {
echo "OK";
}
--- request
GET /worker_2
--- no_error_log
[error]
=== TEST 4: Create worker with bad config key
--- http_config eval
qq {
lua_package_path "./lib/?.lua;;";
init_by_lua_block {
if $LedgeEnv::test_coverage == 1 then
require("luacov.runner").init()
end
}
init_worker_by_lua_block {
require("ledge.worker").new({
foo = "one",
})
}
}
--- config
location /worker_4 {
echo "OK";
}
--- request
GET /worker_4
--- error_log
field foo does not exist
=== TEST 5: Run workers without errors
--- http_config eval
qq {
lua_package_path "./lib/?.lua;;";
init_by_lua_block {
if $LedgeEnv::test_coverage == 1 then
require("luacov.runner").init()
end
}
init_worker_by_lua_block {
require("ledge.worker").new():run()
}
}
--- config
location /worker_5 {
echo "OK";
}
--- request
GET /worker_5
--- no_error_log
[error]
=== TEST 6: Push a job and confirm it runs
--- http_config eval: $::HttpConfig_Test6
--- config
location /worker_6 {
content_by_lua_block {
local qless = assert(require("resty.qless").new({
get_redis_client = require("ledge").create_qless_connection
}))
local jid = assert(qless.queues["ledge_gc"]:put("ledge.job.test"))
ngx.sleep(2)
ngx.say(foo)
local job = qless.jobs:get(jid)
ngx.say(job.state)
}
}
--- request
GET /worker_6
--- response_body
2
complete
--- timeout: 5
--- no_error_log
[error]
================================================
FILE: t/02-integration/age.t
================================================
# Test scaffolding for t/02-integration/age.t (Age header behaviour).
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";    # make the shared LedgeEnv test helper findable
use LedgeEnv;
# Default nginx http{} config, referenced by each test block via
# "--- http_config eval: $::HttpConfig".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();    # must run after $HttpConfig is defined
__DATA__
=== TEST 1: No calculated Age header on cache MISS.
--- http_config eval: $::HttpConfig
--- config
location /age_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
origin_mode = require("ledge").ORIGIN_MODE_AVOID
}):run()
}
}
location /age {
more_set_headers "Cache-Control public, max-age=600";
echo "OK";
}
--- request
GET /age_prx
--- response_headers
Age:
--- no_error_log
[error]
=== TEST 2: Age header on cache HIT
--- http_config eval: $::HttpConfig
--- config
location /age_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
origin_mode = require("ledge").ORIGIN_MODE_AVOID
}):run()
}
}
location /age {
more_set_headers "Cache-Control public, max-age=600";
echo "OK";
}
--- request
GET /age_prx
--- response_headers_like
Age: \d+
--- no_error_log
[error]
================================================
FILE: t/02-integration/cache.t
================================================
# Test scaffolding for t/02-integration/cache.t (core caching behaviour).
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";    # make the shared LedgeEnv test helper findable
use LedgeEnv;
# The ledge_test shared dict passes state between requests within a test
# (TEST 16b stores an entity id in it); run_worker starts the background
# ledge worker needed for garbage collection of replaced entities.
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
lua_shared_dict ledge_test 1m;
}, run_worker => 1);
no_long_string();
no_diff();
run_tests();    # must run after $HttpConfig is defined
__DATA__
=== TEST 1: Subzero request; X-Cache: MISS
--- http_config eval: $::HttpConfig
--- config
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 1")
}
}
--- request
GET /cache_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
TEST 1
--- no_error_log
[error]
=== TEST 1b: Subzero request; X-Cache: MISS is prepended
--- http_config eval: $::HttpConfig
--- config
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["X-Cache"] = "HIT from example.com"
ngx.say("TEST 1")
}
}
--- request
GET /cache_prx?append
--- response_headers_like
X-Cache: MISS from .+, HIT from example.com
--- response_body
TEST 1
--- no_error_log
[error]
=== TEST 2: Hot request; X-Cache: HIT
--- http_config eval: $::HttpConfig
--- config
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /cache_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_body
TEST 1
--- no_error_log
[error]
=== TEST 3: No-cache request; X-Cache: MISS
--- http_config eval: $::HttpConfig
--- config
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 3")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /cache_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
TEST 3
--- no_error_log
[error]
=== TEST 3b: No-cache request with extension; X-Cache: MISS
--- http_config eval: $::HttpConfig
--- config
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 3b")
}
}
--- more_headers
Cache-Control: no-cache, stale-if-error=1234
--- request
GET /cache_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
TEST 3b
--- no_error_log
[error]
=== TEST 3c: No-store request; X-Cache: MISS
--- http_config eval: $::HttpConfig
--- config
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 3c")
}
}
--- more_headers
Cache-Control: no-store
--- request
GET /cache_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
TEST 3c
--- no_error_log
[error]
=== TEST 4a: PURGE
--- http_config eval: $::HttpConfig
--- config
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
PURGE /cache_prx
--- error_code: 200
--- no_error_log
[error]
=== TEST 4b: Cold request (expired but known); X-Cache: MISS
--- http_config eval: $::HttpConfig
--- config
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 4")
}
}
--- request
GET /cache_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
TEST 4
--- no_error_log
[error]
=== TEST 4c: Clean up
--- http_config eval: $::HttpConfig
--- config
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
X-Purge: delete
--- request
PURGE /cache_prx
--- error_code: 200
--- no_error_log
[error]
=== TEST 6a: Prime a resource into cache
--- http_config eval: $::HttpConfig
--- config
location /cache_6_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_6 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 6")
}
}
--- request
GET /cache_6_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
TEST 6
--- no_error_log
[error]
=== TEST 6b: Revalidate - now the response is a non-cacheable 404.
--- http_config eval: $::HttpConfig
--- config
location /cache_6_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_6 {
content_by_lua_block {
ngx.status = 404
ngx.header["Cache-Control"] = "no-cache"
ngx.say("TEST 6b")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /cache_6_prx
--- response_headers_like
X-Cache:
--- response_body
TEST 6b
--- error_code: 404
--- no_error_log
[error]
=== TEST 6c: Confirm all keys have been removed
--- http_config eval: $::HttpConfig
--- config
location /cache_6 {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local key_chain = handler:cache_key_chain()
local res, err = redis:keys(key_chain.root .. "*")
if res then
ngx.say("Numkeys: ", #res)
end
}
}
--- request
GET /cache_6
--- response_body
Numkeys: 0
--- no_error_log
[error]
=== TEST 7: only-if-cached should return 504 on cache miss
--- http_config eval: $::HttpConfig
--- config
location /cache_7_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_7 {
content_by_lua_block {
ngx.say("TEST 7")
}
}
--- more_headers
Cache-Control: only-if-cached
--- request
GET /cache_7_prx
--- error_code: 504
--- no_error_log
[error]
=== TEST 8: min-fresh reduces calculated ttl
--- http_config eval: $::HttpConfig
--- config
location /cache_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache {
content_by_lua_block {
ngx.say("TEST 8")
}
}
--- more_headers
Cache-Control: min-fresh=9999
--- request
GET /cache_prx
--- response_body
TEST 8
--- no_error_log
[error]
=== TEST 9a: Prime a 404 response into cache; X-Cache: MISS
--- http_config eval: $::HttpConfig
--- config
location /cache_9_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_9 {
content_by_lua_block {
ngx.status = ngx.HTTP_NOT_FOUND
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 9")
}
}
--- request
GET /cache_9_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
TEST 9
--- error_code: 404
--- no_error_log
[error]
=== TEST 9b: Test we still have 404; X-Cache: HIT
--- http_config eval: $::HttpConfig
--- config
location /cache_9_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /cache_9_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_body
TEST 9
--- error_code: 404
--- no_error_log
[error]
=== TEST 11: Prime with HEAD into cache (no body); X-Cache: MISS
--- http_config eval: $::HttpConfig
--- config
location /cache_11_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_11 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
}
}
--- more_headers
Cache-Control: no-cache
--- request
HEAD /cache_11_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
--- error_code: 200
--- no_error_log
[error]
=== TEST 11b: Check HEAD request did not cache
--- http_config eval: $::HttpConfig
--- config
location /cache_11_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_11 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
}
}
--- request
HEAD /cache_11_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
--- error_code: 200
--- no_error_log
[error]
=== TEST 12: Prime 301 into cache with no body; X-Cache: MISS
--- http_config eval: $::HttpConfig
--- config
location /cache_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_12 {
content_by_lua_block {
ngx.status = 301
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Location"] = "http://example.com"
}
}
--- request
GET /cache_12_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
--- error_code: 301
--- no_error_log
[error]
=== TEST 12b: Check 301 request cached with no body
--- http_config eval: $::HttpConfig
--- config
location /cache_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /cache_12_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_body
--- error_code: 301
--- no_error_log
[error]
=== TEST 13: Subzero request; X-Cache: MISS
--- http_config eval: $::HttpConfig
--- config
location /cache_13_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_13 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["X-Custom-Hdr"] = "foo"
ngx.say("TEST 13")
}
}
--- request
GET /cache_13_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_headers
X-Custom-Hdr: foo
--- response_body
TEST 13
--- no_error_log
[error]
=== TEST 13b: Forced cache update
--- http_config eval: $::HttpConfig
--- config
location /cache_13_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_13 {
content_by_lua_block {
-- Should override ALL headers from TEST 13
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["X-Custom-Hdr2"] = "bar"
ngx.say("TEST 13b")
}
}
--- request
GET /cache_13_prx
--- more_headers
Cache-Control: no-cache
--- response_headers_like
X-Cache: MISS from .*
--- response_headers
X-Custom-Hdr2: bar
--- response_body
TEST 13b
--- no_error_log
[error]
=== TEST 13c: Cache hit - Headers are overridden not appended to
--- http_config eval: $::HttpConfig
--- config
location /cache_13_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_13 {
content_by_lua_block {
ngx.say("TEST 13b")
ngx.log(ngx.ERR, "Never run")
}
}
--- request
GET /cache_13_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers
X-Custom-Hdr2: bar
--- raw_response_headers_unlike: .*X-Custom-Hdr: foo.*
--- no_error_log
[error]
--- response_body
TEST 13b
=== TEST 14: Cache-Control no-cache=#field and private=#field, drop headers from cache
--- http_config eval: $::HttpConfig
--- config
location /cache_14_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_14 {
content_by_lua_block {
ngx.header["Cache-Control"] = {
'max-age=3600, private="XTest"',
'no-cache="X-Test2"'
}
ngx.header["XTest"] = "foo"
ngx.header["X-test2"] = "bar"
ngx.say("TEST 14")
}
}
--- request
GET /cache_14_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_headers
XTest: foo
X-Test2: bar
--- response_body
TEST 14
--- no_error_log
[error]
=== TEST 14b: Cache hit - Headers are not returned from cache
--- http_config eval: $::HttpConfig
--- config
location /cache_14_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_14 {
content_by_lua_block {
ngx.say("TEST 14b")
ngx.log(ngx.ERR, "Never run")
}
}
--- request
GET /cache_14_prx
--- response_headers_like
X-Cache: HIT from .*
--- raw_response_headers_unlike: .*(XTest: foo|X-test2: bar).*
--- no_error_log
[error]
--- response_body
TEST 14
=== TEST 15a: Prime a resource into cache
--- http_config eval: $::HttpConfig
--- config
location /cache_15_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 1,
}):run()
}
}
location /cache_15 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=60"
ngx.header["Vary"] = "Foobar"
ngx.say("TEST 15")
}
}
--- request
GET /cache_15_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
TEST 15
--- no_error_log
[error]
=== TEST 15b: Confirm all keys exist
--- http_config eval: $::HttpConfig
--- config
location /cache_15_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local key_chain = handler:cache_key_chain()
local res, err = redis:keys(key_chain.root .. "*")
if res then
ngx.say("Numkeys: ", #res)
end
-- Sleep longer than keep_cache_for, to prove all keys have ttl assigned
ngx.sleep(3)
local res, err = redis:keys(key_chain.root .. "*")
if res then
ngx.say("Numkeys: ", #res)
end
}
}
--- request
GET /cache_15_prx
--- timeout: 5
--- response_body
Numkeys: 7
Numkeys: 7
--- no_error_log
[error]
=== TEST 16: Prime a resource into cache
--- http_config eval: $::HttpConfig
--- config
location /cache_16_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_16 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=60"
ngx.say("TEST 16")
}
}
--- request
GET /cache_16_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
TEST 16
--- no_error_log
[error]
=== TEST 16b: Modified main key aborts transaction and cleans up entity
--- http_config eval: $::HttpConfig
--- config
location /cache_16_check {
content_by_lua_block {
local entity_id = ngx.shared.ledge_test:get("entity_id")
local redis = require("ledge").create_storage_connection()
local ok, err = redis:exists(entity_id)
ngx.print(ok, " ", err)
}
}
location /cache_16_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_serve", function(res)
-- Create a new connection
local redis = require("ledge").create_redis_connection()
-- Set a new key on the main key
redis:hset(handler:cache_key_chain().main, "foo", "bar")
ngx.shared.ledge_test:set("entity_id", res.entity_id)
end)
handler:run()
}
}
location /cache_16 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=60"
ngx.print("TEST 16b")
}
}
--- request eval
["GET /cache_16_prx", "GET /cache_16_check"]
--- more_headers
Cache-Control: no-cache
--- response_headers_like eval
["X-Cache: MISS from .*", ""]
--- response_body eval
["TEST 16b", "false nil"]
--- wait: 3
--- no_error_log
[error]
=== TEST 16c: Modified main key aborts transaction - HIT
--- http_config eval: $::HttpConfig
--- config
location /cache_16_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_16 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=60"
ngx.say("TEST 16b")
}
}
--- request
GET /cache_16_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_body
TEST 16
--- no_error_log
[error]
=== TEST 16d: Partial entry misses
--- http_config eval: $::HttpConfig
--- config
location /cache_16_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local key_chain = handler:cache_key_chain()
-- Break entities
redis:del(handler:cache_key_chain().entities)
handler:run()
}
}
location /cache_16 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=60"
ngx.say("TEST 16d")
}
}
--- request
GET /cache_16_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_body
TEST 16d
--- no_error_log
[error]
=== TEST 17: Main key is completely overridden
--- http_config eval: $::HttpConfig
--- config
location /cache_17_modify {
rewrite ^(.*)_modify$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local key = handler:cache_key_chain().main
-- Add new field to main key
redis:hset(key, "bogus_field", "foobar")
-- Print result from redis
local main, err = redis:hgetall(key)
main = redis:array_to_hash(main)
ngx.print(key, " bogus_field: ", main["bogus_field"])
}
}
location /cache_17_check {
rewrite ^(.*)_check$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local key = handler:cache_key_chain().main
-- Print result from redis
local main, err = redis:hgetall(key)
main = redis:array_to_hash(main)
ngx.print(key, " bogus_field: ", main["bogus_field"])
}
}
location /cache_17_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cache_17 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=60"
ngx.print("TEST 17")
}
}
--- request eval
[
"GET /cache_17_prx",
"GET /cache_17_modify",
"GET /cache_17_prx",
"GET /cache_17_check",
]
--- more_headers eval
[
"",
"",
"Cache-Control: no-cache",
"",
]
--- response_body eval
[
"TEST 17",
"ledge:cache:http:localhost:/cache_17:#::main bogus_field: foobar",
"TEST 17",
"ledge:cache:http:localhost:/cache_17:#::main bogus_field: nil",
]
--- no_error_log
[error]
================================================
FILE: t/02-integration/collapsed_forwarding.t
================================================
# Test preamble for collapsed-forwarding integration tests.
# Uses Test::Nginx::Socket in 'no_plan' mode (test count determined at runtime).
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Make the t/ directory (parent of this file's dir) visible so LedgeEnv loads.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Build the shared nginx http{} config: a 1m shared dict named "test" is used
# by the data-section tests to count origin hits; client-abort detection is on.
# run_worker => 1 also starts the ledge background worker in init_worker.
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
lua_shared_dict test 1m;
lua_check_client_abort on;
}, run_worker => 1);
# Keep failure diagnostics compact for long bodies; compare output verbatim.
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1a: Prime cache (collapsed forwarding requires having seen a previously cacheable response)
--- http_config eval: $::HttpConfig
--- config
location /collapsed_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /collapsed {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK")
}
}
--- request
GET /collapsed_prx
--- response_body
OK
--- no_error_log
[error]
=== TEST 1b: Purge cache
--- http_config eval: $::HttpConfig
--- config
location /collapsed {
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
PURGE /collapsed
--- error_code: 200
--- no_error_log
[error]
=== TEST 2: Concurrent COLD requests accepting cache
--- http_config eval: $::HttpConfig
--- config
location /concurrent_collapsed {
rewrite_by_lua_block {
ngx.shared.test:set("test_2", 0)
}
echo_location_async "/collapsed_prx";
echo_sleep 0.05;
echo_location_async "/collapsed_prx";
}
location /collapsed_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
enable_collapsed_forwarding = true,
}):run()
}
}
location /collapsed {
content_by_lua_block {
ngx.sleep(0.1)
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK " .. ngx.shared.test:incr("test_2", 1))
}
}
--- request
GET /concurrent_collapsed
--- error_code: 200
--- response_body
OK 1
OK 1
=== TEST 3a: Purge cache
--- http_config eval: $::HttpConfig
--- config
location /collapsed {
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
PURGE /collapsed
--- error_code: 200
--- no_error_log
[error]
=== TEST 3b: Concurrent COLD requests with collapsing turned off
--- http_config eval: $::HttpConfig
--- config
location /concurrent_collapsed {
rewrite_by_lua_block {
ngx.shared.test:set("test_3", 0)
}
echo_location_async '/collapsed_prx';
echo_sleep 0.05;
echo_location_async '/collapsed_prx';
}
location /collapsed_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
enable_collapsed_forwarding = false,
}):run()
}
}
location /collapsed {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK " .. ngx.shared.test:incr("test_3", 1))
ngx.sleep(0.1)
}
}
--- request
GET /concurrent_collapsed
--- error_code: 200
--- response_body
OK 1
OK 2
=== TEST 4a: Purge cache
--- http_config eval: $::HttpConfig
--- config
location /collapsed {
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
PURGE /collapsed
--- error_code: 200
--- no_error_log
[error]
=== TEST 4b: Concurrent COLD requests not accepting cache
--- http_config eval: $::HttpConfig
--- config
location /concurrent_collapsed {
rewrite_by_lua_block {
ngx.shared.test:set("test_4", 0)
}
echo_location_async '/collapsed_prx';
echo_sleep 0.05;
echo_location_async '/collapsed_prx';
}
location /collapsed_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
enable_collapsed_forwarding = true,
}):run()
}
}
location /collapsed {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK " .. ngx.shared.test:incr("test_4", 1))
ngx.sleep(0.1)
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /concurrent_collapsed
--- error_code: 200
--- response_body
OK 1
OK 2
=== TEST 5a: Purge cache
--- http_config eval: $::HttpConfig
--- config
location /collapsed {
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
PURGE /collapsed
--- error_code: 200
--- no_error_log
[error]
=== TEST 5b: Concurrent COLD requests, response no longer cacheable
--- http_config eval: $::HttpConfig
--- config
location /concurrent_collapsed {
rewrite_by_lua_block {
ngx.shared.test:set("test_5", 0)
}
echo_location_async '/collapsed_prx';
echo_sleep 0.05;
echo_location_async '/collapsed_prx';
}
location /collapsed_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
enable_collapsed_forwarding = true,
}):run()
}
}
location /collapsed {
content_by_lua_block {
ngx.header["Cache-Control"] = "no-cache"
ngx.say("OK " .. ngx.shared.test:incr("test_5", 1))
ngx.sleep(0.1)
}
}
--- request
GET /concurrent_collapsed
--- error_code: 200
--- response_body
OK 1
OK 2
=== TEST 6: Concurrent SUBZERO requests
--- http_config eval: $::HttpConfig
--- config
location /concurrent_collapsed_6 {
rewrite_by_lua_block {
ngx.shared.test:set("test_6", 0)
}
echo_location_async '/collapsed_6_prx';
echo_sleep 0.05;
echo_location_async '/collapsed_6_prx';
}
location /collapsed_6_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
enable_collapsed_forwarding = true,
}):run()
}
}
location /collapsed_6 {
content_by_lua_block {
ngx.sleep(0.1)
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK " .. ngx.shared.test:incr("test_6", 1))
}
}
--- request
GET /concurrent_collapsed_6
--- error_code: 200
--- response_body
OK 1
OK 2
=== TEST 7a: Prime cache
--- http_config eval: $::HttpConfig
--- config
location /collapsed_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /collapsed {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "test7a"
ngx.say("OK")
}
}
--- request
GET /collapsed_prx
--- response_body
OK
--- no_error_log
[error]
=== TEST 7b: Concurrent conditional requests which accept cache
(i.e. does this work with revalidation)
--- http_config eval: $::HttpConfig
--- config
location /concurrent_collapsed {
rewrite_by_lua_block {
ngx.shared.test:set("test_7", 0)
}
echo_location_async '/collapsed_prx';
echo_sleep 0.05;
echo_location_async '/collapsed_prx';
echo_sleep 1;
}
location /collapsed_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
enable_collapsed_forwarding = true,
}):run()
}
}
location /collapsed {
content_by_lua_block {
ngx.sleep(0.1)
ngx.header["Etag"] = "test7b"
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK " .. ngx.shared.test:incr("test_7", 1))
}
}
--- more_headers
Cache-Control: max-age=0
If-None-Match: test7b
--- request
GET /concurrent_collapsed
--- error_code: 200
--- response_body
=== TEST 8a: Prime cache (collapsed forwarding requires having seen a previously cacheable response)
--- http_config eval: $::HttpConfig
--- config
location /collapsed8_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /collapsed8 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK")
}
}
--- request eval
["GET /collapsed8_prx", "PURGE /collapsed8_prx"]
--- no_error_log
[error]
=== TEST 8b: Collapse window timed out
--- http_config eval: $::HttpConfig
--- config
location /concurrent_collapsed {
rewrite_by_lua_block {
ngx.shared.test:set("test_8", 0)
}
echo_location_async "/collapsed8_prx";
echo_sleep 0.05;
echo_location_async "/collapsed8_prx";
}
location /collapsed8_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
enable_collapsed_forwarding = true,
collapsed_forwarding_window = 500, -- (ms)
}):run()
}
}
location /collapsed8 {
content_by_lua_block {
ngx.sleep(0.8)
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK " .. ngx.shared.test:incr("test_8", 1))
}
}
--- request
GET /concurrent_collapsed
--- error_code: 200
--- response_body
OK 1
OK 2
=== TEST 9: Collapsing with vary
--- http_config eval: $::HttpConfig
--- config
location /prime {
rewrite ^ /collapsed9 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler({
enable_collapsed_forwarding = true,
}):run()
}
}
location /concurrent_collapsed {
rewrite_by_lua_block {
ngx.shared.test:set("test_9", 0)
}
echo_location_async "/collapsed9_prx";
echo_sleep 0.05;
echo_location_async "/collapsed9_prx";
}
location /collapsed9_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler({
enable_collapsed_forwarding = true,
}):run()
}
}
location /collapsed {
content_by_lua_block {
ngx.sleep(0.1)
local counter = ngx.shared.test:incr("test_9", 1)
ngx.header["Vary"] = "X-Test"
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("OK " .. tostring(counter))
}
}
--- request eval
[
"GET /prime", "PURGE /prime",
"GET /concurrent_collapsed"
]
--- error_code eval
[200, 200, 200]
--- response_body_like eval
[
"OK nil", ".+",
"OK 1OK 1"
]
=== TEST 10: Collapsing with vary - change in spec
--- http_config eval: $::HttpConfig
--- config
location /prime {
rewrite ^ /collapsed10 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler({
enable_collapsed_forwarding = false,
}):run()
}
}
location /concurrent_collapsed {
echo_location_async "/collapsed10_prx";
echo_sleep 0.05;
echo_location_async "/collapsed10_prx";
}
location /collapsed10_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler({
enable_collapsed_forwarding = true,
}):run()
}
}
location /collapsed {
content_by_lua_block {
ngx.sleep(0.1)
local counter = ngx.shared.test:incr("test_10", 1, 0)
if counter == 1 then
ngx.header["Vary"] = "X-Test" -- Prime with this
else
ngx.header["Vary"] = "X-Test2"
end
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("OK " .. tostring(counter))
}
}
--- request eval
[
"GET /prime", "PURGE /prime",
"GET /concurrent_collapsed"
]
--- more_headers eval
[
"X-Test: Foo","X-Test: Foo",
"X-Test: Foo",
]
--- error_code eval
[200, 200, 200]
--- response_body_like eval
[
"OK 1", ".+",
"OK 2OK 2"
]
=== TEST 11: Collapsing with vary - change in spec mismatch
--- http_config eval: $::HttpConfig
--- config
location /prime {
rewrite ^ /collapsed11 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler({
enable_collapsed_forwarding = false,
}):run()
}
}
location /concurrent_collapsed {
echo_subrequest_async GET "/collapsed11a_prx"; # X-Test: Foo
echo_sleep 0.05;
echo_subrequest_async GET "/collapsed11b_prx"; # X-Test: Foo, X-Test2: Bar
}
location /collapsed11a_prx {
rewrite ^(.*)a_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler({
enable_collapsed_forwarding = true,
}):run()
}
}
location /collapsed11b_prx {
rewrite ^(.*)b_prx$ $1 break;
content_by_lua_block {
ngx.req.set_header("X-Test2", "Bar")
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler({
enable_collapsed_forwarding = true,
}):run()
}
}
location /collapsed {
content_by_lua_block {
ngx.sleep(0.1)
local counter = ngx.shared.test:incr("test_11", 1, 0)
if counter == 1 then
ngx.header["Vary"] = "X-Test" -- Prime with this
else
ngx.header["Vary"] = "X-Test2"
end
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("OK " .. tostring(counter))
}
}
--- request eval
[
"GET /prime", "PURGE /prime",
"GET /concurrent_collapsed"
]
--- more_headers eval
[
"X-Test: Foo","X-Test: Foo",
"X-Test: Foo",
]
--- error_code eval
[200, 200, 200]
--- response_body_like eval
[
"OK 1", ".+",
"OK 2OK 3"
]
================================================
FILE: t/02-integration/esi.t
================================================
# Test preamble for ESI integration tests.
# Uses Test::Nginx::Socket in 'no_plan' mode (test count determined at runtime).
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Make the t/ directory (parent of this file's dir) visible so LedgeEnv loads.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Shared http{} config. extra_lua_config injects Lua run at init:
# - ESI enabled globally with a tiny 5-byte buffer to stress the ESI scanner
#   across chunk boundaries;
# - a global Lua run() helper that tags every upstream response with a valid
#   Surrogate-Control header before running the handler, so test locations
#   don't have to repeat that boilerplate.
# NOTE: the qq{...} bodies are emitted verbatim into nginx/Lua config; do not
# add comments or reformat inside them.
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
lua_shared_dict test 1m;
lua_check_client_abort on;
if_modified_since off;
}, extra_lua_config => qq{
require("ledge").set_handler_defaults({
esi_enabled = true,
buffer_size = 5, -- Try to trip scanning up with small buffers
})
-- Make all content return valid Surrogate-Control headers
function run(handler)
if not handler then
handler = require("ledge").create_handler()
end
handler:bind("after_upstream_request", function(res)
res.header["Surrogate-Control"] = [[content="ESI/1.0"]]
end)
handler:run()
end
}, run_worker => 1);
# Keep failure diagnostics compact for long bodies; compare output verbatim.
no_long_string();
no_diff();
run_tests();
__DATA__
__DATA__
=== TEST 0: ESI works on slow and fast paths
--- http_config eval: $::HttpConfig
--- config
location /esi_0_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
run()
}
}
location /esi_0 {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=60"
ngx.print("Hello")
}
}
--- request eval
[
"GET /esi_0_prx",
"GET /esi_0_prx",
]
--- response_body eval
[
"Hello",
"Hello",
]
--- response_headers_like eval
[
"X-Cache: MISS from .*",
"X-Cache: HIT from .*",
]
--- no_error_log
[error]
=== TEST 1: Single line comments removed
--- http_config eval: $::HttpConfig
--- config
location /esi_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
--ledge:config_set("buffer_size", 10)
run()
}
}
location /esi_1 {
default_type text/html;
content_by_lua_block {
ngx.say("")
ngx.say("")
}
}
--- request
GET /esi_1_prx
--- response_body
COMMENTED
COMMENTED
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 1b: Single line comments removed, esi instructions processed
--- http_config eval: $::HttpConfig
--- config
location /esi_1b_prx {
rewrite ^(.*)_prx$ $1b break;
content_by_lua_block {
run()
}
}
location /esi_1b {
default_type text/html;
content_by_lua_block {
ngx.print("")
}
}
--- request
GET /esi_1b_prx?a=1b
--- response_body: a=1b
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 2: Multi line comments removed
--- http_config eval: $::HttpConfig
--- config
location /esi_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_2 {
default_type text/html;
content_by_lua_block {
ngx.print("")
ngx.say("2")
ngx.say("")
}
}
--- request
GET /esi_2_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1
2
3
--- no_error_log
[error]
=== TEST 2b: Multi line comments removed, ESI instructions processed
--- http_config eval: $::HttpConfig
--- config
location /esi_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_2 {
default_type text/html;
content_by_lua_block {
ngx.print("")
ngx.say("2345")
ngx.say("")
}
}
location /test {
content_by_lua_block {
ngx.print("OK")
}
}
--- request
GET /esi_2_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1234 OK
2345
a=1
--- no_error_log
[error]
=== TEST 2c: Multi line escaping comments, nested.
ESI instructions still processed
--- http_config eval: $::HttpConfig
--- config
location /esi_2c_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_2c {
default_type text/html;
content_by_lua_block {
ngx.say("BEFORE")
ngx.print("")
ngx.say("MIDDLE")
ngx.say("$(QUERY_STRING{c})")
ngx.print("-->")
ngx.say("AFTER")
}
}
--- request
GET /esi_2c_prx?a=1&b=2&c=3
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
BEFORE
1
2
MIDDLE
3
AFTER
--- no_error_log
[error]
=== TEST 3: Single line removed.
--- http_config eval: $::HttpConfig
--- config
location /esi_3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_3 {
default_type text/html;
content_by_lua_block {
ngx.say("START")
ngx.say("REMOVED")
ngx.say("REMOVED")
ngx.say("END")
}
}
--- request
GET /esi_3_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
START
END
--- no_error_log
[error]
=== TEST 4: Multi line removed.
--- http_config eval: $::HttpConfig
--- config
location /esi_4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_4 {
default_type text/html;
content_by_lua_block {
ngx.say("1")
ngx.say("")
ngx.say("2")
ngx.say("")
ngx.say("3")
ngx.say("4")
ngx.say("")
ngx.say("5")
ngx.say("")
ngx.say("6")
}
}
--- request
GET /esi_4_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1
3
4
6
--- no_error_log
[error]
=== TEST 5: Include fragment
--- http_config eval: $::HttpConfig
--- config
location /esi_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment_1 {
content_by_lua_block {
ngx.say("FRAGMENT: ", ngx.req.get_uri_args()["a"] or "")
}
}
location /esi_5 {
default_type text/html;
content_by_lua_block {
ngx.say("1")
ngx.print([[]])
ngx.say("2")
ngx.print([[]])
ngx.print("3")
ngx.print([[]])
}
}
--- request
GET /esi_5_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1
FRAGMENT:
2
FRAGMENT: 2
3FRAGMENT: 3
--- no_error_log
[error]
=== TEST 5b: Test fragment always issues GET and only inherits correct
req headers
--- http_config eval: $::HttpConfig
--- config
location /esi_5b_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment_1 {
content_by_lua_block {
ngx.say("method: ", ngx.req.get_method())
local h = ngx.req.get_headers()
local h_keys = {}
for k,v in pairs(h) do
table.insert(h_keys, k)
end
table.sort(h_keys)
for _,k in ipairs(h_keys) do
ngx.say(k, ": ", h[k])
end
}
}
location /esi_5b {
default_type text/html;
content_by_lua_block {
ngx.print([[]])
}
}
--- request
POST /esi_5b_prx
--- more_headers
Cache-Control: no-cache
Cookie: foo
Authorization: bar
Range: bytes=0-
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body_like
method: GET
authorization: bar
cache-control: no-cache
cookie: foo
host: localhost
user-agent: lua-resty-http/\d+\.\d+ \(Lua\) ngx_lua/\d+ ledge_esi/\d+\.\d+[\.\d]*
x-esi-parent-uri: http://localhost/esi_5b_prx
x-esi-recursion-level: 1
--- no_error_log
[error]
=== TEST 5c: Include fragment with absolute URI, schemaless, and no path
--- http_config eval: $::HttpConfig
--- config
location /esi_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment_1 {
echo "FRAGMENT";
}
location =/ {
echo "ROOT FRAGMENT";
}
location /esi_5 {
default_type text/html;
content_by_lua_block {
ngx.print([[]])
ngx.print([[]])
ngx.print([[]])
ngx.print([[]])
ngx.print([[]])
}
}
--- request
GET /esi_5_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
FRAGMENT
FRAGMENT
ROOT FRAGMENT
ROOT FRAGMENT
ROOT FRAGMENT
--- no_error_log
[error]
=== TEST 6: Include multiple fragments, in correct order.
--- http_config eval: $::HttpConfig
--- config
location /esi_6_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment_1 {
content_by_lua_block {
ngx.print("FRAGMENT_1")
}
}
location /fragment_2 {
content_by_lua_block {
ngx.print("FRAGMENT_2")
}
}
location /fragment_3 {
content_by_lua_block {
ngx.print("FRAGMENT_3")
}
}
location /esi_6 {
default_type text/html;
content_by_lua_block {
ngx.say([[]])
ngx.say([[MID LINE ]])
ngx.say([[]])
}
}
--- request
GET /esi_6_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
FRAGMENT_3
MID LINE FRAGMENT_1
FRAGMENT_2
--- no_error_log
[error]
=== TEST 7: Leave instructions intact if ESI is not enabled.
--- http_config eval: $::HttpConfig
--- config
location /esi_7_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_enabled = false,
})
run(handler)
}
}
location /esi_7 {
default_type text/html;
content_by_lua_block {
ngx.print("$(QUERY_STRING)")
}
}
--- request
GET /esi_7_prx?a=1
--- response_body: $(QUERY_STRING)
--- no_error_log
[error]
=== TEST 7b: Leave instructions intact if ESI delegation is enabled - slow path.
--- http_config eval: $::HttpConfig
--- config
location /esi_7b_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_allow_surrogate_delegation = true,
})
run(handler)
}
}
location /esi_7b {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("$(QUERY_STRING)")
}
}
--- request
GET /esi_7b_prx?a=1
--- more_headers
Surrogate-Capability: localhost="ESI/1.0"
--- response_body: $(QUERY_STRING)
--- response_headers
Surrogate-Control: content="ESI/1.0"
--- no_error_log
[error]
=== TEST 7c: Leave instructions intact if ESI delegation is enabled - fast path.
--- http_config eval: $::HttpConfig
--- config
location /esi_7b_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_allow_surrogate_delegation = true,
})
run(handler)
}
}
--- request
GET /esi_7b_prx?a=1
--- more_headers
Surrogate-Capability: localhost="ESI/1.0"
--- response_body: $(QUERY_STRING)
--- response_headers
Surrogate-Control: content="ESI/1.0"
--- no_error_log
[error]
=== TEST 7d: Leave instructions intact if ESI delegation is enabled by IP
on the slow path.
--- http_config eval: $::HttpConfig
--- config
location /esi_7d_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_allow_surrogate_delegation = { "127.0.0.1" },
})
run(handler)
}
}
location /esi_7d {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("$(QUERY_STRING)")
}
}
--- request
GET /esi_7d_prx?a=1
--- more_headers
Surrogate-Capability: localhost="ESI/1.0"
--- response_body: $(QUERY_STRING)
--- response_headers
Surrogate-Control: content="ESI/1.0"
--- no_error_log
[error]
=== TEST 7e: Leave instructions intact if ESI delegation is enabled by IP
on the fast path.
--- http_config eval: $::HttpConfig
--- config
location /esi_7d_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_allow_surrogate_delegation = { "127.0.0.1" },
})
run(handler)
}
}
--- request
GET /esi_7d_prx?a=1
--- more_headers
Surrogate-Capability: localhost="ESI/1.0"
--- response_body: $(QUERY_STRING)
--- response_headers
Surrogate-Control: content="ESI/1.0"
--- no_error_log
[error]
=== TEST 7f: Leave instructions intact if allowed types does not match
on the slow path
--- http_config eval: $::HttpConfig
--- config
location /esi_7f_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_content_types = { "text/plain" },
})
run(handler)
}
}
location /esi_7f {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("$(QUERY_STRING)")
}
}
--- request
GET /esi_7f_prx?a=1
--- response_body: $(QUERY_STRING)
--- response_headers
Surrogate-Control: content="ESI/1.0"
--- no_error_log
[error]
=== TEST 7g: Leave instructions intact if allowed types does not match (fast path)
--- http_config eval: $::HttpConfig
--- config
location /esi_7f_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_content_types = { "text/plain" },
})
run(handler)
}
}
--- request
GET /esi_7f_prx?a=1
--- more_headers
Surrogate-Capability: localhost="ESI/1.0"
--- response_body: $(QUERY_STRING)
--- response_headers
Surrogate-Control: content="ESI/1.0"
--- no_error_log
[error]
=== TEST 8: Response downstream cacheability is zeroed when ESI processing
has occurred.
--- http_config eval: $::HttpConfig
--- config
location /esi_8_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment_1 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=60"
ngx.say("FRAGMENT_1")
}
}
location /esi_8 {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say([["]])
}
}
--- request
GET /esi_8_prx
--- response_headers_like
Cache-Control: private, max-age=0
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 9: Variable evaluation
--- http_config eval: $::HttpConfig
--- config
location /esi_9_prx {
rewrite ^(.*)_prx(.*)$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_9 {
default_type text/html;
content_by_lua_block {
ngx.say("HTTP_COOKIE: $(HTTP_COOKIE)");
ngx.say("HTTP_COOKIE{SQ_SYSTEM_SESSION}: $(HTTP_COOKIE{SQ_SYSTEM_SESSION})");
ngx.say("");
ngx.say("HTTP_COOKIE: $(HTTP_COOKIE)");
ngx.say("HTTP_COOKIE{SQ_SYSTEM_SESSION}: $(HTTP_COOKIE{SQ_SYSTEM_SESSION})");
ngx.say("HTTP_COOKIE{SQ_SYSTEM_SESSION_TYPO}: $(HTTP_COOKIE{SQ_SYSTEM_SESSION_TYPO}|'default message')");
ngx.say("");
ngx.say("$(HTTP_COOKIE{SQ_SYSTEM_SESSION})$(HTTP_COOKIE)$(QUERY_STRING)")
ngx.say("$(HTTP_X_MANY_HEADERS): $(HTTP_X_MANY_HEADERS)")
ngx.say("$(HTTP_X_MANY_HEADERS{2}): $(HTTP_X_MANY_HEADERS{2})")
}
}
--- more_headers
Cookie: myvar=foo; SQ_SYSTEM_SESSION=hello
X-Many-Headers: 1
X-Many-Headers: 2
X-Many-Headers: 3, 4, 5, 6=hello
--- request
GET /esi_9_prx?t=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
HTTP_COOKIE: myvar=foo; SQ_SYSTEM_SESSION=hello
HTTP_COOKIE{SQ_SYSTEM_SESSION}: hello
HTTP_COOKIE: myvar=foo; SQ_SYSTEM_SESSION=hello
HTTP_COOKIE{SQ_SYSTEM_SESSION}: hello
HTTP_COOKIE{SQ_SYSTEM_SESSION_TYPO}: default message
hello$(HTTP_COOKIE)t=1
$(HTTP_X_MANY_HEADERS): 1, 2, 3, 4, 5, 6=hello
$(HTTP_X_MANY_HEADERS{2}): 3, 4, 5, 6=hello
--- no_error_log
[error]
=== TEST 9b: Multiple Variable evaluation
--- http_config eval: $::HttpConfig
--- config
location /esi_9b_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_9b {
default_type text/html;
content_by_lua_block {
ngx.say([[ ]])
}
}
location /fragment1b {
content_by_lua_block {
ngx.print("FRAGMENT:"..ngx.var.args)
}
}
--- request
GET /esi_9b_prx?t=1
--- more_headers
X-ESI-Test: foobar
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
FRAGMENT:t=1&test=foobar
--- no_error_log
[error]
=== TEST 9c: Dictionary variable syntax (cookie)
--- http_config eval: $::HttpConfig
--- config
location /esi_9c_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_9c {
default_type text/html;
content_by_lua_block {
ngx.say([[]])
}
}
location /fragment1c {
content_by_lua_block {
ngx.print("FRAGMENT:"..ngx.var.args)
}
}
--- request
GET /esi_9c_prx?t=1
--- more_headers
Cookie: foo=bar
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
FRAGMENT:1&test=bar
--- no_error_log
[error]
=== TEST 9d: List variable syntax (accept-language)
--- http_config eval: $::HttpConfig
--- config
location /esi_9d_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_9d {
default_type text/html;
content_by_lua_block {
ngx.say([[]])
}
}
location /fragment1d {
content_by_lua_block {
ngx.print("FRAGMENT:"..ngx.var.args)
}
}
--- request
GET /esi_9d_prx?t=1
--- more_headers
Accept-Language: da, en-gb, fr
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
FRAGMENT:1&en-gb=true&de=false
--- no_error_log
[error]
=== TEST 9e: List variable syntax (accept-language) with multiple headers
--- http_config eval: $::HttpConfig
--- config
location /esi_9e_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_9e {
default_type text/html;
content_by_lua_block {
ngx.say([[]])
}
}
location /fragment1d {
content_by_lua_block {
ngx.print("FRAGMENT:"..ngx.var.args)
}
}
--- request
GET /esi_9e_prx?t=1
--- more_headers
Accept-Language: da, en-gb
Accept-Language: fr
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
FRAGMENT:1&en-gb=true&de=false
--- no_error_log
[error]
=== TEST 9e: Default variable values
--- http_config eval: $::HttpConfig
--- config
location /esi_9e_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_9e {
default_type text/html;
content_by_lua_block {
ngx.print("")
ngx.say("$(QUERY_STRING{a}|novalue)")
ngx.say("$(QUERY_STRING{b}|novalue)")
ngx.say("$(QUERY_STRING{c}|\'quoted values can have spaces\')")
ngx.say("$(QUERY_STRING{d}|unquoted values must not have spaces)")
ngx.print("")
}
}
--- request
GET /esi_9e_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1
novalue
quoted values can have spaces
$(QUERY_STRING{d}|unquoted values must not have spaces)
--- no_error_log
[error]
=== TEST 9f: Custom variable injection
--- http_config eval: $::HttpConfig
--- config
location /esi_9f_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_custom_variables = {
["CUSTOM_DICTIONARY"] = { a = 1, b = 2},
["CUSTOM_STRING"] = "foo"
},
})
run(handler)
}
}
location /esi_9f {
default_type text/html;
content_by_lua_block {
ngx.print("")
ngx.say("$(CUSTOM_DICTIONARY|novalue)")
ngx.say("$(CUSTOM_DICTIONARY{a})")
ngx.say("$(CUSTOM_DICTIONARY{b})")
ngx.say("$(CUSTOM_DICTIONARY{c}|novalue)")
ngx.say("$(CUSTOM_STRING)")
ngx.say("$(CUSTOM_STRING{x}|novalue)")
ngx.print("")
}
}
--- request
GET /esi_9f_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
novalue
1
2
novalue
foo
novalue
--- no_error_log
[error]
=== TEST 10: Prime ESI in cache.
--- http_config eval: $::HttpConfig
--- config
location /esi_10_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_enabled = true,
cache_key_spec = {
"scheme",
"host",
"uri",
}
})
run(handler)
}
}
location /esi_10 {
default_type text/html;
content_by_lua_block {
ngx.status = 404
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "esi10"
ngx.say("$(QUERY_STRING)")
}
}
--- request
GET /esi_10_prx?t=1
--- response_body
t=1
--- error_code: 404
--- response_headers_like
X-Cache: MISS from .*
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 10b: ESI still runs on cache HIT.
--- http_config eval: $::HttpConfig
--- config
location /esi_10 {
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_enabled = true,
cache_key_spec = {
"scheme",
"host",
"uri",
}
})
run(handler)
}
}
--- request
GET /esi_10?t=2
--- response_body
t=2
--- error_code: 404
--- response_headers_like
X-Cache: HIT from .*
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 10c: ESI still runs on cache revalidation, upstream 200.
--- http_config eval: $::HttpConfig
--- config
location /esi_10_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_enabled = true,
cache_key_spec = {
"scheme",
"host",
"uri",
}
})
run(handler)
}
}
location /esi_10 {
default_type text/html;
content_by_lua_block {
ngx.status = 404
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "esi10c"
ngx.say("$(QUERY_STRING)")
}
}
--- more_headers
Cache-Control: max-age=0
If-None-Match: esi10
--- request
GET /esi_10_prx?t=3
--- response_body
t=3
--- error_code: 404
--- response_headers_like
X-Cache: MISS from .*
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 10d: ESI still runs on cache revalidation, upstream 200, locally valid.
--- http_config eval: $::HttpConfig
--- config
location /esi_10_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_enabled = true,
cache_key_spec = {
"scheme",
"host",
"uri",
}
})
run(handler)
}
}
location /esi_10 {
default_type text/html;
content_by_lua_block {
ngx.status = 404
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "esi10d"
ngx.say("$(QUERY_STRING)")
}
}
--- more_headers
Cache-Control: max-age=0
If-None-Match: esi10c
--- request
GET /esi_10_prx?t=4
--- response_body
t=4
--- error_code: 404
--- response_headers_like
X-Cache: MISS from .*
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 10e: ESI still runs on cache revalidation, upstream 304, locally valid.
--- http_config eval: $::HttpConfig
--- config
location /esi_10_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_enabled = true,
cache_key_spec = {
"scheme",
"host",
"uri",
}
})
run(handler)
}
}
location /esi_10 {
content_by_lua_block {
ngx.exit(ngx.HTTP_NOT_MODIFIED)
}
}
--- more_headers
Cache-Control: max-age=0
If-None-Match: esi10
--- request
GET /esi_10_prx?t=5
--- response_body
t=5
--- error_code: 404
--- response_headers_like
X-Cache: MISS from .*
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 11a: Prime fragment
--- http_config eval: $::HttpConfig
--- config
location /fragment_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("FRAGMENT")
}
}
--- request
GET /fragment_prx
--- response_body
FRAGMENT
--- error_code: 200
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 11b: Include fragment with client validators.
--- http_config eval: $::HttpConfig
--- config
location /esi_11_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
ngx.req.set_header("If-Modified-Since", ngx.http_time(ngx.time() + 150))
run()
}
}
location /fragment_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("FRAGMENT MODIFIED")
}
}
location /esi_11 {
default_type text/html;
content_by_lua_block {
ngx.say("1")
ngx.print([[]])
ngx.say("2")
}
}
--- request
GET /esi_11_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1
FRAGMENT
2
--- no_error_log
[error]
=== TEST 11c: Include fragment with " H" in URI
Nginx rejects this as a bad request unless the space is percent-encoded
--- http_config eval: $::HttpConfig
--- config
location /esi_11c_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location "/frag Hment" {
content_by_lua_block {
ngx.say("FRAGMENT")
}
}
location /esi_11c {
default_type text/html;
content_by_lua_block {
ngx.say("1")
ngx.print([[]])
ngx.say("2")
}
}
--- request
GET /esi_11c_prx
--- response_body
1
FRAGMENT
2
--- error_code: 200
--- no_error_log
[error]
=== TEST 11d: Use callback feature to modify fragment request params
--- http_config eval: $::HttpConfig
--- config
location /esi_11d_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
-- Bind to the before_esi_include_request event so every ESI fragment
-- subrequest carries an extra header; /fragment echoes it back so the
-- expected body below can assert the callback ran.
local handler = require("ledge").create_handler()
handler:bind("before_esi_include_request", function(req_params)
req_params.headers["X-Foo"] = "bar"
end)
run(handler)
}
}
location "/fragment" {
content_by_lua_block {
-- Echo the header injected by the callback, then the fragment body.
ngx.say(ngx.req.get_headers()["X-Foo"])
ngx.say("FRAGMENT")
}
}
location /esi_11d {
default_type text/html;
content_by_lua_block {
ngx.say("1")
-- NOTE(review): this literal looks truncated; the expected body below
-- ("bar" / "FRAGMENT") implies an <esi:include src="/fragment" /> tag
-- here -- confirm against the upstream copy of this test file.
ngx.print([[]])
ngx.say("2")
}
}
--- request
GET /esi_11d_prx
--- response_body
1
bar
FRAGMENT
2
--- error_code: 200
--- no_error_log
[error]
=== TEST 12: ESI processed over buffer larger than buffer_size.
--- http_config eval: $::HttpConfig
--- config
location /esi_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 16,
})
run(handler)
}
}
location /esi_12 {
default_type text/html;
content_by_lua_block {
local junk = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
ngx.print("")
ngx.say(junk)
ngx.say("$(QUERY_STRING)")
ngx.say(junk)
ngx.print("")
}
}
--- request
GET /esi_12_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
a=1
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
--- no_error_log
[error]
=== TEST 12b: Incomplete ESI tag opening at the end of buffer (lookahead)
--- http_config eval: $::HttpConfig
--- config
location /esi_12b_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 4,
})
run(handler)
}
}
location /esi_12b {
default_type text/html;
content_by_lua_block {
ngx.print("---")
ngx.print("$(QUERY_STRING)")
ngx.print("")
}
}
--- request
GET /esi_12b_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body: ---a=1
--- no_error_log
[error]
=== TEST 12c: Incomplete ESI tag opening at the end of buffer (lookahead)
--- http_config eval: $::HttpConfig
--- config
location /esi_12c_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 5,
})
run(handler)
}
}
location /esi_12c {
default_type text/html;
content_by_lua_block {
ngx.print("---")
ngx.print("$(QUERY_STRING)")
ngx.print("")
}
}
--- request
GET /esi_12c_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body: ---a=1
--- no_error_log
[error]
=== TEST 12d: Incomplete ESI tag opening at the end of buffer (lookahead)
--- http_config eval: $::HttpConfig
--- config
location /esi_12d_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 6,
})
run(handler)
}
}
location /esi_12d {
default_type text/html;
content_by_lua_block {
ngx.print("---")
ngx.print("$(QUERY_STRING)")
ngx.print("")
}
}
--- request
GET /esi_12d_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body: ---a=1
--- no_error_log
[error]
=== TEST 12e: Incomplete ESI tag opening at the end of response (regression)
--- http_config eval: $::HttpConfig
--- config
location /esi_12e_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 9,
})
run(handler)
}
}
location /esi_12e {
default_type text/html;
content_by_lua_block {
ngx.print("---")
ngx.print("$(QUERY_STRING)")
ngx.print("")
-- NOTE(review): `junk` is undefined from here on, and the request
-- below targets /esi_13_prx, which no location in this section
-- serves. TEST 12e and TEST 13 appear to have been merged together
-- (the "=== TEST 13" header, its config, and the end of 12e are
-- missing) -- restore both sections from the upstream file.
ngx.say(junk)
ngx.say("$(QUERY_STRING)")
ngx.say(junk)
ngx.say("")
}
}
--- request
GET /esi_13_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
$(QUERY_STRING)
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
--- error_log
esi scan bailed as instructions spanned buffers larger than esi_max_size
=== TEST 14: choose - when - otherwise, first when matched
--- http_config eval: $::HttpConfig
--- config
location /esi_14_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_14 {
default_type text/html;
content_by_lua_block {
local content = [[Hello
True
Still true, but first match wins
Will never happen
Goodbye]]
ngx.say(content)
}
}
--- request
GET /esi_14_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
Hello
True
Goodbye
--- no_error_log
[error]
=== TEST 15: choose - when - otherwise, second when matched
--- http_config eval: $::HttpConfig
--- config
location /esi_15_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_15 {
default_type text/html;
content_by_lua_block {
local content = [[Hello
1
2
Still true, but first match wins
Will never happen
Goodbye]]
ngx.say(content)
}
}
--- request
GET /esi_15_prx?a=2
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
Hello
2
Goodbye
--- no_error_log
[error]
=== TEST 16: choose - when - otherwise, otherwise catchall
--- http_config eval: $::HttpConfig
--- config
location /esi_16_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_16 {
default_type text/html;
content_by_lua_block {
local content = [[Hello
1
2
Otherwise
Goodbye]]
ngx.say(content)
}
}
--- request
GET /esi_16_prx?a=3
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
Hello
Otherwise
Goodbye
--- no_error_log
[error]
=== TEST 16c: multiple single line choose - when - otherwise
--- http_config eval: $::HttpConfig
--- config
location /esi_16c_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_16c {
default_type text/html;
content_by_lua_block {
local content = [[1Otherwise: 3NOPE]]
ngx.print(content)
}
}
--- request
GET /esi_16c_prx?a=3
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body: Otherwise: 3
--- no_error_log
[error]
=== TEST 17: choose - when - test, conditional syntax
--- http_config eval: $::HttpConfig
--- config
location /esi_17_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_17 {
default_type text/html;
content_by_lua_block {
local conditions = {
"1 == 1",
"1==1",
"1 != 2",
"2 > 1",
"1 > 2 | 3 > 2",
"(1 > 2) | (3.02 > 2.4124 & 1 <= 1)",
"(1>2)||(3>2&&2>1)",
"! (1 < 2) | (3 > 2 & 2 >= 1)",
"'hello' == 'hello'",
"'hello' != 'goodbye'",
"'repeat' != 'function'", -- use of lua words in strings
"'repeat' != function", -- use of lua words unquoted
"' repeat sentence with function in it ' == ' repeat sentence with function in it '", -- use of lua words in strings
"$(QUERY_STRING{msg}) == 'hello'",
[['string \' escaping' == 'string \' escaping']],
[['string \" escaping' == 'string \" escaping']],
[[$(QUERY_STRING{msg2}) == 'hel\'lo']],
"'hello' =~ '/llo/'",
[['HeL\'\'\'Lo' =~ '/hel[\']{1,3}lo/i']],
[['http://example.com?foo=bar' =~ '/^(http[s]?)://([^:/]+)(?::(\d+))?(.*)/']],
[['htxtp://example.com?foo=bar' =~ '/^(http[s]?)://([^:/]+)(?::(\d+))?(.*)/']],
"(1 > 2) | (3.02 > 2.4124 & 1 <= 1) && ('HeLLo' =~ '/hello/i')",
"2 =~ '/[0-9]/'",
"$(HTTP_ACCEPT_LANGUAGE{gb}) == 'true'",
"$(HTTP_ACCEPT_LANGUAGE{fr}) == 'false'",
"$(HTTP_ACCEPT_LANGUAGE{fr}) == 'true'",
}
for _,c in ipairs(conditions) do
ngx.say([[]], c,
[[Failed]])
end
}
}
--- request
GET /esi_17_prx?msg=hello&msg2=hel'lo
--- more_headers
Accept-Language: en-gb
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1 == 1
1==1
1 != 2
2 > 1
1 > 2 | 3 > 2
(1 > 2) | (3.02 > 2.4124 & 1 <= 1)
(1>2)||(3>2&&2>1)
! (1 < 2) | (3 > 2 & 2 >= 1)
'hello' == 'hello'
'hello' != 'goodbye'
'repeat' != 'function'
Failed
' repeat sentence with function in it ' == ' repeat sentence with function in it '
hello == 'hello'
'string \' escaping' == 'string \' escaping'
'string \" escaping' == 'string \" escaping'
hel'lo == 'hel\'lo'
'hello' =~ '/llo/'
'HeL\'\'\'Lo' =~ '/hel[\']{1,3}lo/i'
'http://example.com?foo=bar' =~ '/^(http[s]?)://([^:/]+)(?::(\d+))?(.*)/'
Failed
(1 > 2) | (3.02 > 2.4124 & 1 <= 1) && ('HeLLo' =~ '/hello/i')
2 =~ '/[0-9]/'
true == 'true'
false == 'false'
Failed
=== TEST 17b: Lexer complains about unparseable conditions
--- http_config eval: $::HttpConfig
--- config
location /esi_17b_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_17b {
default_type text/html;
content_by_lua_block {
local content = [[
OK
OK
OK
Otherwise
]]
ngx.print(content)
}
}
--- request
GET /esi_17b_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
Otherwise
--- error_log
Parse error: found string after string in: "'hello' 'there'"
Parse error: found string after number in: "3 'hello'"
Parse error: found number after string in: "'hello' 4"
=== TEST 18: Surrogate-Control with lower version number still works.
--- http_config eval: $::HttpConfig
--- config
location /esi_18_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("after_upstream_request", function(res)
res.header["Surrogate-Control"] = [[content="ESI/0.8"]]
end)
handler:run()
}
}
location /esi_18 {
default_type text/html;
content_by_lua_block {
ngx.print("$(QUERY_STRING)")
}
}
--- request
GET /esi_18_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body: a=1
--- no_error_log
[error]
=== TEST 19: Surrogate-Control with higher version fails
--- http_config eval: $::HttpConfig
--- config
location /esi_19_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("after_upstream_request", function(res)
res.header["Surrogate-Control"] = [[content="ESI/1.1"]]
end)
handler:run()
}
}
location /esi_19 {
default_type text/html;
content_by_lua_block {
ngx.print("$(QUERY_STRING)")
}
}
--- request
GET /esi_19_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body: $(QUERY_STRING)
--- no_error_log
[error]
=== TEST 20: Test we advertise Surrogate-Capability
--- http_config eval: $::HttpConfig
--- config
location /esi_20_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_20 {
default_type text/html;
content_by_lua_block {
ngx.print(ngx.req.get_headers()["Surrogate-Capability"])
}
}
--- request
GET /esi_20_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body_like: ^(.*)="ESI/1.0"$
--- no_error_log
[error]
=== TEST 20b: Surrogate-Capability using visible_hostname
--- http_config eval: $::HttpConfig
--- config
location /esi_20_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
visible_hostname = "ledge.example.com"
})
run(handler)
}
}
location /esi_20 {
default_type text/html;
content_by_lua_block {
ngx.print(ngx.req.get_headers()["Surrogate-Capability"])
}
}
--- request
GET /esi_20_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body: ledge.example.com="ESI/1.0"
--- no_error_log
[error]
=== TEST 21: Test Surrogate-Capability is appended when needed
--- http_config eval: $::HttpConfig
--- config
location /esi_21_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_21 {
default_type text/html;
content_by_lua_block {
ngx.print(ngx.req.get_headers()["Surrogate-Capability"])
}
}
--- request
GET /esi_21_prx
--- more_headers
Surrogate-Capability: abc="ESI/0.8"
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body_like: ^abc="ESI/0.8", (.*)="ESI/1.0"$
--- no_error_log
[error]
=== TEST 22: Test comments are removed.
--- http_config eval: $::HttpConfig
--- config
location /esi_22_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_22 {
default_type text/html;
content_by_lua_block {
ngx.print([[1234 5678]])
}
}
--- request
GET /esi_22_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body: 1234 5678
--- no_error_log
[error]
=== TEST 23a: Surrogate-Control removed when ESI enabled but no work needed
(slow path)
--- http_config eval: $::HttpConfig
--- config
location /esi_23_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_23 {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("NO ESI")
}
}
--- request
GET /esi_23_prx?a=1
--- response_body: NO ESI
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 23b: Surrogate-Control removed when ESI enabled but no work needed
(fast path)
--- http_config eval: $::HttpConfig
--- config
location /esi_23_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
--- request
GET /esi_23_prx?a=1
--- response_body: NO ESI
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 24a: Fragment recursion limit
--- http_config eval: $::HttpConfig
--- config
location /esi_24_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
-- recursion limit fails on tiny buffer sizes because it can't be scanned
local handler = require("ledge").create_handler({
buffer_size = 4096,
})
run(handler)
}
}
location /fragment_24_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 4096,
})
run(handler)
}
}
location /fragment_24 {
default_type text/html;
content_by_lua_block {
ngx.say("c: ", ngx.req.get_headers()["X-ESI-Recursion-Level"] or "0")
ngx.print([[]])
ngx.print([[]])
}
}
location /esi_24 {
default_type text/html;
content_by_lua_block {
ngx.say("p: ", ngx.req.get_headers()["X-ESI-Recursion-Level"] or "0")
ngx.print([[]])
}
}
--- request
GET /esi_24_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
p: 0
c: 1
p: 2
c: 3
p: 4
c: 5
p: 6
c: 7
p: 8
c: 9
p: 10
--- error_log
ESI recursion limit (10) exceeded
=== TEST 24b: Lower fragment recursion limit
--- http_config eval: $::HttpConfig
--- config
location /esi_24_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 4096,
esi_recursion_limit = 5,
})
run(handler)
}
}
location /fragment_24_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 4096,
esi_recursion_limit = 5,
})
run(handler)
}
}
location /fragment_24 {
default_type text/html;
content_by_lua_block {
ngx.say("c: ", ngx.req.get_headers()["X-ESI-Recursion-Level"] or "0")
ngx.print([[]])
ngx.print([[]])
}
}
location /esi_24 {
default_type text/html;
content_by_lua_block {
ngx.say("p: ", ngx.req.get_headers()["X-ESI-Recursion-Level"] or "0")
ngx.print([[]])
}
}
--- request
GET /esi_24_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
p: 0
c: 1
p: 2
c: 3
p: 4
c: 5
--- error_log
ESI recursion limit (5) exceeded
=== TEST 25: Multiple esi includes on a single line
--- http_config eval: $::HttpConfig
--- config
location /esi_25_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment_25a {
default_type text/html;
content_by_lua_block {
ngx.print("25a")
}
}
location /fragment_25b {
default_type text/html;
content_by_lua_block {
ngx.print("25b")
}
}
location /esi_25 {
default_type text/html;
content_by_lua_block {
ngx.print([[ ]])
}
}
--- request
GET /esi_25_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body: 25a 25b
--- no_error_log
[error]
=== TEST 26: Include tag whitespace
--- http_config eval: $::HttpConfig
--- config
location /esi_26_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment_1 {
echo "FRAGMENT";
}
location /esi_26 {
default_type text/html;
content_by_lua_block {
ngx.say("1")
ngx.print([[]])
ngx.say("2")
ngx.print([[]])
}
}
--- request
GET /esi_26_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1
FRAGMENT
2
FRAGMENT
--- no_error_log
[error]
=== TEST 27a: Prime cache, immediately expired
--- http_config eval: $::HttpConfig
--- config
location /esi_27_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
-- The before_save hook rewrites Cache-Control before the entry is
-- stored, so it is saved already-expired. The follow-up tests 27b/27c
-- then exercise ESI while serving this entry stale
-- (stale-while-revalidate / stale-if-error). The origin's max-age=60
-- only affects the response served on this first, priming request.
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
-- immediately expire cache entries
res.header["Cache-Control"] = "max-age=0"
end)
run(handler)
}
}
location /esi_27 {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=60"
ngx.say("$(QUERY_STRING)")
}
}
--- request
GET /esi_27_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
a=1
--- no_error_log
[error]
=== TEST 27b: ESI still works when serving stale
--- http_config eval: $::HttpConfig
--- config
location /esi_27_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
--- more_headers
Cache-Control: stale-while-revalidate=60
--- request
GET /esi_27_prx?a=1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
a=1
--- no_error_log
[error]
=== TEST 27c: ESI still works when serving stale-if-error
--- http_config eval: $::HttpConfig
--- config
location /esi_27_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_27 {
return 500;
}
--- more_headers
Cache-Control: stale-if-error=9999
--- request
GET /esi_27_prx?a=1
--- wait: 1
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
a=1
--- wait: 2
--- no_error_log
[error]
=== TEST 28: Remaining parent response returned on fragment error
--- http_config eval: $::HttpConfig
--- config
location /esi_28_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment_1 {
return 500;
echo "FRAGMENT";
}
location /esi_28 {
default_type text/html;
content_by_lua_block {
ngx.say("1")
ngx.print([[]])
ngx.say("2")
}
}
--- request
GET /esi_28_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1
2
--- error_log
500 from /fragment_1
=== TEST 29: Remaining parent response chunks returned on fragment error
--- http_config eval: $::HttpConfig
--- config
location /esi_29_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 16,
})
run(handler)
}
}
location /fragment_1 {
return 500;
echo "FRAGMENT";
}
location /esi_29 {
default_type text/html;
content_by_lua_block {
local junk = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
ngx.say(junk)
ngx.say("1")
ngx.print([[]])
ngx.say(junk)
ngx.say("2")
}
}
--- request
GET /esi_29_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
1
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
2
--- error_log
500 from /fragment_1
=== TEST 30: Prime with ESI args - which should not enter cache key or
reach the origin
--- http_config eval: $::HttpConfig
--- config
location /esi_30_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_enabled = true,
esi_args_prefix = "_esi_",
})
run(handler)
}
}
location /esi_30 {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("$(ESI_ARGS{a}|noarg): ")
ngx.say(ngx.req.get_uri_args()["_esi_a"])
ngx.print("$(ESI_ARGS{b}|noarg): ")
ngx.say(ngx.req.get_uri_args()["_esi_b"])
ngx.say("$(ESI_ARGS|noarg)")
}
}
--- request
GET /esi_30_prx?_esi_a=1&_esi_b=2
--- response_body_like
1: nil
2: nil
_esi_[ab]=[12]&_esi_[ab]=[12]
--- error_code: 200
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 30b: ESI args vary, but cache is a HIT
--- http_config eval: $::HttpConfig
--- config
location /esi_30_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_30 {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("MISS")
}
}
--- request eval
["GET /esi_30_prx?esi_a=2", "GET /esi_30_prx?esi_a=3", "GET /esi_30_prx?bad_esi_a=4"]
--- response_body eval
["2: nil
noarg: nil
esi_a=2
",
"3: nil
noarg: nil
esi_a=3
",
"MISS"]
--- error_code eval
["200", "200", "200"]
--- response_headers_like eval
["X-Cache: HIT from .*", "X-Cache: HIT from .*", "X-Cache: MISS from .*"]
--- no_error_log
[error]
=== TEST 30c: As 30 but with request not accepting cache
--- http_config eval: $::HttpConfig
--- config
location /esi_30c_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_30c {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("$(ESI_ARGS{a}|noarg): ")
ngx.print(ngx.req.get_uri_args()["esi_a"])
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /esi_30c_prx?esi_a=1
--- response_body: 1: nil
--- error_code: 200
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 31a: Multiple sibling and child conditionals, winning expressions at various depths
--- http_config eval: $::HttpConfig
--- config
location /esi_31a_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_31a {
default_type text/html;
content_by_lua_block {
local content = [[
BEFORE CONTENT
a
b
RANDOM ILLEGAL CONTENT
c
l1d
l1e
l2f
l2 OTHERWISE
l1 OTHERWISE
l2g
AFTER CONTENT]]
ngx.print(content)
}
}
--- request eval
[
"GET /esi_31a_prx?a=a",
"GET /esi_31a_prx?b=b",
"GET /esi_31a_prx?a=a&b=b",
"GET /esi_31a_prx?l1d=l1d",
"GET /esi_31a_prx?c=c&l1d=l1d",
"GET /esi_31a_prx?c=c&l1e=l1e&l2f=l2f",
"GET /esi_31a_prx?c=c&l1e=l1e",
"GET /esi_31a_prx?c=c",
"GET /esi_31a_prx?c=c&l2g=l2g",
]
--- response_body eval
[
"BEFORE CONTENT
a
AFTER CONTENT",
"BEFORE CONTENT
b
AFTER CONTENT",
"BEFORE CONTENT
a
b
AFTER CONTENT",
"BEFORE CONTENT
AFTER CONTENT",
"BEFORE CONTENT
c
l1d
AFTER CONTENT",
"BEFORE CONTENT
c
l1e
l2f
AFTER CONTENT",
"BEFORE CONTENT
c
l1e
l2 OTHERWISE
AFTER CONTENT",
"BEFORE CONTENT
c
l1 OTHERWISE
AFTER CONTENT",
"BEFORE CONTENT
c
l1 OTHERWISE
l2g
AFTER CONTENT",
]
--- no_error_log
[error]
=== TEST 31b: As above, no whitespace
--- http_config eval: $::HttpConfig
--- config
location /esi_31b_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 200,
})
run(handler)
}
}
location /esi_31b {
default_type text/html;
content_by_lua_block {
local content = [[BEFORE CONTENTabRANDOM ILLEGAL CONTENTcl1dl1el2fl2 OTHERWISEl1 OTHERWISEl2gAFTER CONTENT]]
ngx.print(content)
}
}
--- request eval
[
"GET /esi_31b_prx?a=a",
"GET /esi_31b_prx?b=b",
"GET /esi_31b_prx?a=a&b=b",
"GET /esi_31b_prx?l1d=l1d",
"GET /esi_31b_prx?c=c&l1d=l1d",
"GET /esi_31b_prx?c=c&l1e=l1e&l2f=l2f",
"GET /esi_31b_prx?c=c&l1e=l1e",
"GET /esi_31b_prx?c=c",
"GET /esi_31b_prx?c=c&l2g=l2g",
]
--- response_body eval
["BEFORE CONTENTaAFTER CONTENT",
"BEFORE CONTENTbAFTER CONTENT",
"BEFORE CONTENTabAFTER CONTENT",
"BEFORE CONTENTAFTER CONTENT",
"BEFORE CONTENTcl1dAFTER CONTENT",
"BEFORE CONTENTcl1el2fAFTER CONTENT",
"BEFORE CONTENTcl1el2 OTHERWISEAFTER CONTENT",
"BEFORE CONTENTcl1 OTHERWISEAFTER CONTENT",
"BEFORE CONTENTcl1 OTHERWISEl2gAFTER CONTENT"]
--- no_error_log
[error]
=== TEST 32: Tag parsing boundaries
--- http_config eval: $::HttpConfig
--- config
location /esi_32_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
buffer_size = 50,
})
run(handler)
}
}
location /esi_32 {
default_type text/html;
content_by_lua_block {
local content = [[
BEFORE CONTENT
a
AFTER CONTENT
]]
ngx.print(content)
}
}
location /fragment {
echo "OK";
}
--- request
GET /esi_32_prx?a=a
--- response_body
BEFORE CONTENT
a
OK
AFTER CONTENT
--- no_error_log
[error]
=== TEST 33: Invalid Surrogate-Capability header is ignored
--- http_config eval: $::HttpConfig
--- config
location /esi_33_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_allow_surrogate_delegation = true,
})
run(handler)
}
}
location /esi_33 {
default_type text/html;
content_by_lua_block {
ngx.header["Surrogate-Control"] = 'content="ESI/1.0"'
ngx.print("$(QUERY_STRING)")
}
}
--- request
GET /esi_33_prx?foo=bar
--- more_headers
Surrogate-capability: localhost="ESI/1foo"
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body: foo=bar
--- no_error_log
[error]
=== TEST 34: Leave instructions intact if surrogate-capability does not
match http host
--- http_config eval: $::HttpConfig
--- config
location /esi_34_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_allow_surrogate_delegation = true,
})
run(handler)
}
}
location /esi_34 {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("$(QUERY_STRING)")
}
}
--- request
GET /esi_34_prx?a=1
--- more_headers
Surrogate-Capability: esi.example.com="ESI/1.0"
--- response_body: $(QUERY_STRING)
--- response_headers
Surrogate-Control: content="ESI/1.0"
--- no_error_log
[error]
=== TEST 35: ESI_ARGS instructions evaluate to their default values
when no ESI args are present in the query string
--- http_config eval: $::HttpConfig
--- config
location /esi_35_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_enabled = true,
esi_args_prefix = "_esi_",
})
run(handler)
}
}
location /esi_35 {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("$(ESI_ARGS{a}|noarg)")
ngx.say("$(ESI_ARGS{b}|noarg)")
ngx.say("$(ESI_ARGS|noarg)")
ngx.say("OK")
}
}
--- request
GET /esi_35_prx?foo=bar
--- response_body
noarg
noarg
noarg
OK
--- error_code: 200
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 36: No error if res.has_esi flag is incorrectly set
--- http_config eval: $::HttpConfig
--- config
location /esi_36_break {
rewrite ^(.*)_break$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
local redis = require("ledge").create_redis_connection()
handler.redis = redis
local key = handler:cache_key_chain().main
-- Incorrectly set has_esi flag on main key
redis:hset(key, "has_esi", "ESI/1.0")
ngx.print("OK")
}
}
location /esi_36_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
-- No surrogate control here
require("ledge").create_handler():run()
}
}
location /esi_36 {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=60"
ngx.print("Hello")
}
}
--- request eval
[
"GET /esi_36_prx",
"GET /esi_36_break",
"GET /esi_36_prx",
]
--- response_body eval
[
"Hello",
"OK",
"Hello",
]
--- response_headers_like eval
[
"X-Cache: MISS from .*",
"",
"X-Cache: HIT from .*",
]
--- no_error_log
[error]
=== TEST 37: SSRF
--- http_config eval: $::HttpConfig
--- config
location /esi_37_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
ngx.req.set_uri_args('evil=foo"/>')
run()
}
}
location /fragment_1 {
content_by_lua_block {
ngx.say("FRAGMENT")
}
}
location /esi_ {
default_type text/html;
content_by_lua_block {
ngx.print([[]])
}
}
location /bad_frag {
content_by_lua_block {
ngx.log(ngx.ERR, "Shouldn't be able to request this")
}
}
--- request
GET /esi_37_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- no_error_log
[error]
=== TEST 38: SSRF via
--- http_config eval: $::HttpConfig
--- config
location /esi_38_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
ngx.req.set_uri_args('evil=')
run()
}
}
location /esi_ {
default_type text/html;
content_by_lua_block {
ngx.say([[$(QUERY_STRING{evil})]])
}
}
location /bad_frag {
content_by_lua_block {
ngx.log(ngx.ERR, "Shouldn't be able to request this")
}
}
--- request
GET /esi_38_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
<esi:include src="/bad_frag" />
--- no_error_log
[error]
=== TEST 39: XSS via
--- http_config eval: $::HttpConfig
--- config
location /esi_39_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
ngx.req.set_uri_args('evil=')
run()
}
}
location /esi_ {
default_type text/html;
content_by_lua_block {
ngx.say([[$(QUERY_STRING{evil})]])
ngx.say([[$(RAW_QUERY_STRING{evil})]])
}
}
--- request
GET /esi_39_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
<script>alert("HAXXED");</script>
--- no_error_log
[error]
=== TEST 40: ESI vars in when/choose blocks are replaced
--- http_config eval: $::HttpConfig
--- config
location /esi_40_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_40 {
default_type text/html;
content_by_lua_block {
local content = [[
$(QUERY_STRING{a})
$(RAW_QUERY_STRING{tag})
$(QUERY_STRING{tag})
Will never happen
]]
ngx.print(content)
}
}
--- request
GET /esi_40_prx?a=1&tag=foobar
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1
foobar
foo<script>alert("bad!")</script>bar
--- no_error_log
[error]
=== TEST 41: Vars inside when/choose blocks are not evaluated before esi includes
--- http_config eval: $::HttpConfig
--- config
location /esi_41_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
ngx.req.set_uri_args('a=test&evil="')
run()
}
}
location /esi_ {
default_type text/html;
content_by_lua_block {
local content = [[BEFORE $(QUERY_STRING{a})
$(QUERY_STRING{a})
Will never happen
AFTER]]
ngx.say(content)
}
}
location /fragment_1 {
content_by_lua_block {
ngx.print("FRAGMENT")
}
}
location /bad_frag {
content_by_lua_block {
ngx.log(ngx.ERR, "Shouldn't be able to request this")
}
}
--- request
GET /esi_41_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
BEFORE $(QUERY_STRING{a})
FRAGMENT
test
AFTER
--- no_error_log
[error]
=== TEST 42: By default includes to 3rd party domains are allowed
--- http_config eval: $::HttpConfig
--- config
location /esi_42_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_42 {
default_type text/html;
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local content = [[]]
ngx.say(content)
}
}
--- request
GET /esi_42_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
{
"userId": 1,
"id": 1,
"title": "delectus aut autem",
"completed": false
}
--- no_error_log
[error]
=== TEST 43: Disable third party includes
--- http_config eval: $::HttpConfig
--- config
location /esi_43_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_disable_third_party_includes = true,
})
run(handler)
}
}
location /esi_43 {
default_type text/html;
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local content = [[]]
ngx.print(content)
}
}
--- request
GET /esi_43_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body:
--- no_error_log
[error]
=== TEST 44: White list third party includes
--- http_config eval: $::HttpConfig
--- config
location /esi_44_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_disable_third_party_includes = true,
esi_third_party_includes_domain_whitelist = {
["jsonplaceholder.typicode.com"] = true,
},
})
run(handler)
}
}
location /esi_44 {
default_type text/html;
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local content = [[]]
ngx.say(content)
}
}
--- request
GET /esi_44_prx
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
{
"userId": 1,
"id": 1,
"title": "delectus aut autem",
"completed": false
}
--- no_error_log
[error]
=== TEST 45: Cookies and Authorization propagate to fragment on same domain
--- http_config eval: $::HttpConfig
--- config
location /esi_45_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /fragment_1 {
content_by_lua_block {
ngx.say("method: ", ngx.req.get_method())
local h = ngx.req.get_headers()
local h_keys = {}
for k,v in pairs(h) do
table.insert(h_keys, k)
end
table.sort(h_keys)
for _,k in ipairs(h_keys) do
ngx.say(k, ": ", h[k])
end
}
}
location /esi_45 {
default_type text/html;
content_by_lua_block {
ngx.print([[]])
}
}
--- request
POST /esi_45_prx
--- more_headers
Cache-Control: no-cache
Cookie: foo
Authorization: bar
Range: bytes=0-
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body_like
method: GET
authorization: bar
cache-control: no-cache
cookie: foo
host: localhost
user-agent: lua-resty-http/\d+\.\d+ \(Lua\) ngx_lua/\d+ ledge_esi/\d+\.\d+[\.\d]*
x-esi-parent-uri: http://localhost/esi_45_prx
x-esi-recursion-level: 1
--- no_error_log
[error]
=== TEST 45b: Cookies and Authorization don't propagate to fragment on different domain
--- http_config eval: $::HttpConfig
--- config
location /esi_45_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
run()
}
}
location /esi_45 {
default_type text/html;
content_by_lua_block {
ngx.print([[]])
}
}
--- request
POST /esi_45_prx
--- more_headers
Cache-Control: no-cache
Cookie: foo
Authorization: bar
Range: bytes=0-
Accept: text/plain
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body_like
(.*)"method": "GET",
(.*)"cache-control": "no-cache",
--- response_body_unlike
(.*)"authorization": "bar",
(.*)"cookie": "foo",
--- no_error_log
[error]
=== TEST 46: Cookie var blacklist
--- http_config eval: $::HttpConfig
--- config
location /esi_46_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
esi_vars_cookie_blacklist = {
not_allowed = true,
},
})
run(handler)
}
}
location /esi_46 {
default_type text/html;
content_by_lua_block {
-- Blacklist should apply to expansion in vars
ngx.say([[$(HTTP_COOKIE)]])
-- And by key
ngx.say([[$(HTTP_COOKIE{allowed}):$(HTTP_COOKIE{not_allowed})]])
-- ...and also in URIs
ngx.say([[]])
}
}
location /fragment {
content_by_lua_block {
ngx.say("FRAGMENT:"..ngx.var.args)
-- But ALL cookies are still propagated by default to subrequests
local cookie = require("resty.cookie").new()
ngx.print(cookie:get("allowed") .. ":" .. cookie:get("not_allowed"))
}
}
--- request
GET /esi_46_prx
--- more_headers
Cookie: allowed=yes
Cookie: also_allowed=yes
Cookie: not_allowed=no
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body_like
(allowed=yes; also_allowed=yes)|(also_allowed=yes; allowed=yes)
yes:
FRAGMENT:&allowed=yes&not_allowed=
yes:no
--- no_error_log
[error]
================================================
FILE: t/02-integration/events.t
================================================
# t/02-integration/events.t — integration tests for ledge handler event hooks
# (handler:bind callbacks such as before_serve / before_upstream_request).
use Test::Nginx::Socket 'no_plan';    # test count computed automatically
use FindBin;
use lib "$FindBin::Bin/..";           # make the shared t/LedgeEnv.pm helper loadable
use LedgeEnv;
# Shared nginx http{} config, interpolated into each test via "eval: $::HttpConfig".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();                     # do not truncate long strings in failure diagnostics
no_diff();                            # show raw got/expected instead of a diff
run_tests();                          # consumes the test specs below __DATA__
__DATA__
=== TEST 1: before_serve (add response header)
--- http_config eval: $::HttpConfig
--- config
location /events_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_serve", function(res)
res.header["X-Modified"] = "Modified"
end)
handler:run()
}
}
location /events_1 {
echo "ORIGIN";
}
--- request
GET /events_1_prx
--- error_code: 200
--- response_headers
X-Modified: Modified
--- no_error_log
[error]
=== TEST 2: before_upstream_request (modify request params)
--- http_config eval: $::HttpConfig
--- config
location /events_2 {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_upstream_request", function(params)
params.path = "/modified"
end)
handler:run()
}
}
location /modified {
echo "ORIGIN";
}
--- request
GET /events_2
--- error_code: 200
--- response_body
ORIGIN
--- no_error_log
[error]
================================================
FILE: t/02-integration/gc.t
================================================
# t/02-integration/gc.t — garbage collection tests: entity cleanup after
# revalidation, expiry, and partial key-chain eviction.
use Test::Nginx::Socket 'no_plan';    # test count computed automatically
use FindBin;
use lib "$FindBin::Bin/..";           # make the shared t/LedgeEnv.pm helper loadable
use LedgeEnv;
# lua_check_client_abort lets ledge notice aborted clients; keep_cache_for = 0
# makes expired entries eligible for collection immediately. run_worker starts
# the background qless worker that performs the actual GC jobs.
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
lua_check_client_abort on;
}, extra_lua_config => qq{
require("ledge").set_handler_defaults({
keep_cache_for = 0,
})
}, run_worker => 1);
no_long_string();                     # do not truncate long strings in failure diagnostics
no_diff();                            # show raw got/expected instead of a diff
run_tests();                          # consumes the test specs below __DATA__
__DATA__
=== TEST 1: Prime cache
--- http_config eval: $::HttpConfig
--- config
location /gc_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /gc {
more_set_headers "Cache-Control: public, max-age=60";
echo "OK";
}
--- request
GET /gc_prx
--- no_error_log
[error]
--- response_body
OK
=== TEST 2: Force revalidation (creates new entity)
--- http_config eval: $::HttpConfig
--- config
location /gc_prx {
rewrite ^(.*)_prx$ $1 break;
echo_location_async '/gc_a';
echo_sleep 0.1;
echo_location_async '/gc_b';
echo_sleep 2.5;
}
location /gc_a {
rewrite ^(.*)_a$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run();
}
}
location /gc_b {
rewrite ^(.*)_b$ $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local key_chain = handler:cache_key_chain()
local num_entities, err = redis:scard(key_chain.entities)
ngx.say(num_entities)
}
}
location /gc {
more_set_headers "Cache-Control: public, max-age=5";
content_by_lua_block {
ngx.say("UPDATED")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /gc_prx
--- response_body
UPDATED
1
--- wait: 1
=== TEST 3: Check we now have just one entity
--- http_config eval: $::HttpConfig
--- config
location /gc {
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local key_chain = handler:cache_key_chain()
local num_entities, err = redis:scard(key_chain.entities)
ngx.say(num_entities)
}
}
--- request
GET /gc
--- no_error_log
[error]
--- response_body
1
--- wait: 2
=== TEST 4: Entity will have expired, check Redis has cleaned up all keys.
--- http_config eval: $::HttpConfig
--- config
location /gc {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local key_chain = handler:cache_key_chain()
local res, err = redis:keys(key_chain.full .. "*")
assert(not next(res), "res should be empty")
}
}
--- request
GET /gc
--- no_error_log
[error]
=== TEST 5: Prime cache
--- http_config eval: $::HttpConfig
--- config
location /gc_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /gc_5 {
more_set_headers "Cache-Control: public, max-age=60";
echo "OK";
}
--- request
GET /gc_5_prx
--- no_error_log
[error]
--- response_body
OK
=== TEST 5b: Delete one part of the key chain
Simulate eviction under memory pressure. Will cause a MISS.
--- http_config eval: $::HttpConfig
--- config
location /gc_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local key_chain = handler:cache_key_chain()
redis:del(key_chain.headers)
handler:run()
}
}
location /gc_5 {
more_set_headers "Cache-Control: public, max-age=60";
echo "OK 2";
}
--- request
GET /gc_5_prx
--- wait: 3
--- no_error_log
[error]
--- response_body
OK 2
=== TEST 5c: Missing keys should cause collection of the old entity.
--- http_config eval: $::HttpConfig
--- config
location /gc_5 {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local key_chain = handler:cache_key_chain()
local res, err = redis:keys(key_chain.full .. "*")
if res then
ngx.say(#res)
end
}
}
--- request
GET /gc_5
--- no_error_log
[error]
--- response_body
5
================================================
FILE: t/02-integration/gzip.t
================================================
# t/02-integration/gzip.t — tests for gzip handling: serving/gunzipping cached
# gzipped upstream responses depending on client Accept-Encoding, ESI and Range.
use Test::Nginx::Socket 'no_plan';    # test count computed automatically
use FindBin;
use lib "$FindBin::Bin/..";           # make the shared t/LedgeEnv.pm helper loadable
use LedgeEnv;
# Shared nginx http{} config, interpolated into each test via "eval: $::HttpConfig".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();                     # do not truncate long strings in failure diagnostics
no_diff();                            # show raw got/expected instead of a diff
run_tests();                          # consumes the test specs below __DATA__
__DATA__
=== TEST 1: Prime gzipped response
--- http_config eval: $::HttpConfig
--- config
location /gzip_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /gzip {
gzip on;
gzip_proxied any;
gzip_min_length 1;
gzip_http_version 1.0;
default_type text/html;
more_set_headers "Cache-Control: public, max-age=600";
more_set_headers "Content-Type: text/html";
echo "OK";
}
--- request
GET /gzip_prx
--- more_headers
Accept-Encoding: gzip
--- response_body_unlike: OK
--- no_error_log
[error]
=== TEST 2: Client doesn't support gzip, gets plain response
--- http_config eval: $::HttpConfig
--- config
location /gzip_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /gzip_prx
--- response_body
OK
--- no_error_log
[error]
=== TEST 2b: Client doesn't support gzip, gunzip is disabled, gets zipped response
--- http_config eval: $::HttpConfig
--- config
location /gzip_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
gunzip_enabled = false,
}):run()
}
}
--- request
GET /gzip_prx
--- response_body_unlike: OK
--- no_error_log
[error]
=== TEST 3: Client does support gzip, gets zipped response
--- http_config eval: $::HttpConfig
--- config
location /gzip_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /gzip_prx
--- more_headers
Accept-Encoding: gzip
--- response_body_unlike: OK
--- no_error_log
[error]
=== TEST 4: Client does support gzip, but sends a range, gets plain full response
--- http_config eval: $::HttpConfig
--- config
location /gzip_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /gzip_prx
--- more_headers
Accept-Encoding: gzip
--- more_headers
Range: bytes=0-0
--- error_code: 200
--- response_body
OK
--- no_error_log
[error]
=== TEST 5: Prime gzipped response with ESI, auto unzips.
--- http_config eval: $::HttpConfig
--- config
location /gzip_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
esi_enabled = true,
}):run()
}
}
location /gzip_5 {
gzip on;
gzip_proxied any;
gzip_min_length 1;
gzip_http_version 1.0;
default_type text/html;
more_set_headers "Cache-Control: public, max-age=600";
more_set_headers "Content-Type: text/html";
more_set_headers 'Surrogate-Control: content="ESI/1.0"';
echo "OK";
}
--- request
GET /gzip_5_prx
--- more_headers
Accept-Encoding: gzip
--- response_body
OK
--- no_error_log
[error]
=== TEST 6: Client does support gzip, but content had to be unzipped on save
--- http_config eval: $::HttpConfig
--- config
location /gzip_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /gzip_5_prx
--- more_headers
Accept-Encoding: gzip
--- response_body
OK
--- no_error_log
[error]
=== TEST 7: HEAD request for gzipped response with ESI, auto unzips.
--- http_config eval: $::HttpConfig
--- config
location /gzip_7_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
esi_enabled = true,
}):run()
}
}
location /gzip_7 {
gzip on;
gzip_proxied any;
gzip_min_length 1;
gzip_http_version 1.0;
default_type text/html;
more_set_headers "Cache-Control: public, max-age=600";
more_set_headers "Content-Type: text/html";
more_set_headers 'Surrogate-Control: content="ESI/1.0"';
echo "OK";
}
--- request
HEAD /gzip_7_prx
--- more_headers
Accept-Encoding: gzip
--- response_body
--- no_error_log
[error]
================================================
FILE: t/02-integration/hop_by_hop_headers.t
================================================
# t/02-integration/hop_by_hop_headers.t — verify hop-by-hop headers
# (Proxy-Authenticate, Upgrade, ...) are neither forwarded nor cached.
use Test::Nginx::Socket 'no_plan';    # test count computed automatically
use FindBin;
use lib "$FindBin::Bin/..";           # make the shared t/LedgeEnv.pm helper loadable
use LedgeEnv;
# Shared nginx http{} config, interpolated into each test via "eval: $::HttpConfig".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();                     # do not truncate long strings in failure diagnostics
no_diff();                            # show raw got/expected instead of a diff
run_tests();                          # consumes the test specs below __DATA__
__DATA__
=== TEST 1: Test hop-by-hop headers are not passed on.
--- http_config eval: $::HttpConfig
--- config
location /hop_by_hop_headers_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /hop_by_hop_headers {
more_set_headers "Cache-Control public, max-age=600";
more_set_headers "Proxy-Authenticate foo";
more_set_headers "Upgrade foo";
echo "OK";
}
--- request
GET /hop_by_hop_headers_prx
--- response_headers
Proxy-Authenticate:
Upgrade:
--- no_error_log
[error]
=== TEST 2: Test hop-by-hop headers were not cached.
--- http_config eval: $::HttpConfig
--- config
location /hop_by_hop_headers_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /hop_by_hop_headers_prx
--- response_headers
Proxy-Authenticate:
Upgrade:
--- no_error_log
[error]
================================================
FILE: t/02-integration/max-stale.t
================================================
# t/02-integration/max-stale.t — tests for the request-side Cache-Control
# max-stale directive and the revalidation directives that override it.
use Test::Nginx::Socket 'no_plan';    # test count computed automatically
use FindBin;
use lib "$FindBin::Bin/..";           # make the shared t/LedgeEnv.pm helper loadable
use LedgeEnv;
# Preload a "state" module shared across requests so origin locations can count
# cache misses; run_worker starts the background worker for revalidation jobs.
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
package.loaded["state"] = {
miss_count = 0,
}
}, run_worker => 1);
no_long_string();                     # do not truncate long strings in failure diagnostics
no_diff();                            # show raw got/expected instead of a diff
run_tests();                          # consumes the test specs below __DATA__
__DATA__
=== TEST 1: Honour max-stale request header for an expired item
--- http_config eval: $::HttpConfig
--- config
location /stale_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
-- immediately expire
res.header["Cache-Control"] = "max-age=0"
end)
handler:run()
}
}
location /stale_1 {
content_by_lua_block {
local state = require("state")
state.miss_count = state.miss_count + 1
ngx.status = 404
ngx.header["Cache-Control"] = "max-age=60"
ngx.print("TEST 1: ", state.miss_count)
}
}
--- more_headers
Cache-Control: max-stale=1000
--- request eval
["GET /stale_1_prx", "GET /stale_1_prx"]
--- response_body eval
["TEST 1: 1", "TEST 1: 1"]
--- response_headers_like eval
["", 'Warning: 110 (?:[^\s]*) "Response is stale"']
--- error_code eval
[404, 404]
--- no_error_log
[error]
=== TEST 1b: Confirm nothing was revalidated in the background
--- http_config eval: $::HttpConfig
--- config
location /stale_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Cache-Control: max-stale=1000
--- request
GET /stale_1_prx
--- response_body: TEST 1: 1
--- response_headers_like
Warning: 110 (?:[^\s]*) "Response is stale"
--- error_code eval
404
--- no_error_log
[error]
=== TEST 5: proxy-revalidate must revalidate (not serve stale)
--- http_config eval: $::HttpConfig
--- config
location /stale_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
-- immediately expire
res.header["Cache-Control"] = "max-age=0, proxy-revalidate"
end)
handler:run()
}
}
location /stale_5 {
content_by_lua_block {
local state = require("state")
state.miss_count = state.miss_count + 1
ngx.status = 404
ngx.header["Cache-Control"] = "max-age=3600, proxy-revalidate"
ngx.print("TEST 5: ", state.miss_count)
}
}
--- more_headers
Cache-Control: max-stale=120
--- request eval
["GET /stale_5_prx", "GET /stale_5_prx"]
--- response_body eval
["TEST 5: 1", "TEST 5: 2"]
--- raw_response_headers_unlike eval
["Warning: 110", "Warning: 110"]
--- error_code eval
[404, 404]
--- no_error_log
[error]
=== TEST 6: must-revalidate must revalidate (not serve stale)
--- http_config eval: $::HttpConfig
--- config
location /stale_6_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
-- immediately expire
res.header["Cache-Control"] = "max-age=0, must-revalidate"
end)
handler:run()
}
}
location /stale_6 {
content_by_lua_block {
local state = require("state")
state.miss_count = state.miss_count + 1
ngx.status = 404
ngx.header["Cache-Control"] = "max-age=3600, must-revalidate"
ngx.print("TEST 6: ", state.miss_count)
}
}
--- more_headers
Cache-Control: max-stale=120
--- request eval
["GET /stale_6_prx", "GET /stale_6_prx"]
--- response_body eval
["TEST 6: 1", "TEST 6: 2"]
--- raw_response_headers_unlike eval
["Warning: 110", "Warning: 110"]
--- error_code eval
[404, 404]
--- no_error_log
[error]
=== TEST 7: Can serve stale but must revalidate because of Age
--- http_config eval: $::HttpConfig
--- config
location /stale_7_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
-- immediately expire
res.header["Cache-Control"] = "max-age=0"
end)
handler:run()
}
}
location /stale_7 {
content_by_lua_block {
local state = require("state")
state.miss_count = state.miss_count + 1
ngx.status = 404
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 7: ", state.miss_count)
}
}
--- more_headers
Cache-Control: max-stale=120, max-age=1
--- request eval
["GET /stale_7_prx", "GET /stale_7_prx"]
--- response_body eval
["TEST 7: 1", "TEST 7: 2"]
--- raw_response_headers_unlike eval
["Warning: 110", "Warning: 110"]
--- error_code eval
[404, 404]
--- no_error_log
[error]
--- wait: 2
================================================
FILE: t/02-integration/max_size.t
================================================
# t/02-integration/max_size.t — tests that bodies larger than the storage
# driver's max_size are served but not cached, and don't clobber existing cache.
use Test::Nginx::Socket 'no_plan';    # test count computed automatically
use FindBin;
use lib "$FindBin::Bin/..";           # make the shared t/LedgeEnv.pm helper loadable
use LedgeEnv;
# Deliberately tiny max_size (8 bytes) so ordinary test bodies exceed it.
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
require("ledge").set_handler_defaults({
storage_driver_config = {
max_size = 8,
}
})
});
no_long_string();                     # do not truncate long strings in failure diagnostics
no_diff();                            # show raw got/expected instead of a diff
run_tests();                          # consumes the test specs below __DATA__
__DATA__
=== TEST 1: Response larger than cache_max_memory.
--- http_config eval: $::HttpConfig
--- config
location /max_memory_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /max_memory {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("RESPONSE IS TOO LARGE TEST 1")
}
}
--- request
GET /max_memory_prx
--- response_body
RESPONSE IS TOO LARGE TEST 1
--- response_headers_like
X-Cache: MISS from .*
--- error_log
storage failed to write: body is larger than 8 bytes
=== TEST 2: Test we did not store in previous test.
--- http_config eval: $::HttpConfig
--- config
location /max_memory_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /max_memory {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 2")
}
}
--- request
GET /max_memory_prx
--- response_body
TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
=== TEST 3: Non-chunked response larger than cache_max_memory.
--- http_config eval: $::HttpConfig
--- config
location /max_memory_3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /max_memory_3 {
chunked_transfer_encoding off;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
local body = "RESPONSE IS TOO LARGE TEST 3\n"
ngx.header["Content-Length"] = string.len(body)
ngx.print(body)
}
}
--- request
GET /max_memory_3_prx
--- response_body
RESPONSE IS TOO LARGE TEST 3
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
=== TEST 4: Test we did not store in previous test.
--- http_config eval: $::HttpConfig
--- config
location /max_memory_3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /max_memory_3 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 4")
}
}
--- request
GET /max_memory_3_prx
--- response_body
TEST 4
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
=== TEST 5a: Prime cache with ok size
--- http_config eval: $::HttpConfig
--- config
location /max_memory_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /max_memory_5 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK")
}
}
--- request
GET /max_memory_5_prx
--- response_body
OK
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 5b: Try to replace with a large response
--- http_config eval: $::HttpConfig
--- config
location /max_memory_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /max_memory_5 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("RESPONSE IS TOO LARGE")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /max_memory_5_prx
--- response_body
RESPONSE IS TOO LARGE
--- response_headers_like
X-Cache: MISS from .*
--- error_log
larger than 8 bytes
=== TEST 5c: Confirm original cache is still ok
--- http_config eval: $::HttpConfig
--- config
location /max_memory_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /max_memory_5_prx
--- response_body
OK
--- response_headers_like
X-Cache: HIT from .*
--- no_error_log
[error]
================================================
FILE: t/02-integration/memory_pressure.t
================================================
# t/02-integration/memory_pressure.t — simulate Redis evicting parts of the
# cache key chain (as under memory pressure) and verify ledge degrades to MISS.
use Test::Nginx::Socket 'no_plan';    # test count computed automatically
use FindBin;
use lib "$FindBin::Bin/..";           # make the shared t/LedgeEnv.pm helper loadable
use LedgeEnv;
# ESI enabled globally since some tests exercise eviction during ESI reads;
# run_worker starts the background worker for collection jobs.
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
require("ledge").set_handler_defaults({
esi_enabled = true,
})
}, run_worker => 1);
no_long_string();                     # do not truncate long strings in failure diagnostics
no_diff();                            # show raw got/expected instead of a diff
run_tests();                          # consumes the test specs below __DATA__
__DATA__
=== TEST 1: Prime some cache
--- http_config eval: $::HttpConfig
--- config
location "/mem_pressure_1_prx" {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location "/mem_pressure_1" {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Surrogate-Control"] = [[content="ESI/1.0"]]
ngx.print("Key: ", ngx.req.get_uri_args()["key"])
}
}
--- request eval
["GET /mem_pressure_1_prx?key=main",
"GET /mem_pressure_1_prx?key=headers",
"GET /mem_pressure_1_prx?key=entities"]
--- response_body eval
["Key: main",
"Key: headers",
"Key: entities"]
--- no_error_log
[error]
=== TEST 1b: Break each key, in a different way for each, then try to serve
--- http_config eval: $::HttpConfig
--- config
location "/mem_pressure_1_prx" {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local key_chain = handler:cache_key_chain()
local evict = ngx.req.get_uri_args()["key"]
local key = key_chain[evict]
ngx.log(ngx.DEBUG, "will evict: ", key)
local res, err = redis:del(key)
if not res then
ngx.log(ngx.ERR, "could not evict: ", err)
end
redis:set(evict, "true")
ngx.log(ngx.DEBUG, tostring(res))
redis:close()
handler:run()
}
}
location "/mem_pressure_1" {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=0"
ngx.print("MISSED: ", ngx.req.get_uri_args()["key"])
}
}
--- request eval
["GET /mem_pressure_1_prx?key=main",
"GET /mem_pressure_1_prx?key=headers",
"GET /mem_pressure_1_prx?key=entities"]
--- response_body eval
["MISSED: main",
"MISSED: headers",
"MISSED: entities"]
--- no_error_log
[error]
=== TEST 2: Prime and break ::main before transaction completes
(leaves it partial)
--- http_config eval: $::HttpConfig
--- config
location "/mem_pressure_2_prx" {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_serve", function(res)
local main = handler:cache_key_chain().main
handler.redis:del(main)
end)
handler:run()
}
}
location "/mem_pressure_2" {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("ORIGIN")
}
}
--- request
GET /mem_pressure_2_prx
--- response_body: ORIGIN
--- no_error_log
[error]
=== TEST 2b: Confirm broken ::main doesn't get served
--- http_config eval: $::HttpConfig
--- config
location "/mem_pressure_2_prx" {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location "/mem_pressure_2" {
default_type text/html;
content_by_lua_block {
ngx.print("ORIGIN")
}
}
--- request
GET /mem_pressure_2_prx
--- response_body: ORIGIN
--- no_error_log
[error]
=== TEST 3: Prime and break active entity during read
--- http_config eval: $::HttpConfig
--- config
location "/mem_pressure_3_prx" {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
if not ngx.req.get_uri_args()["prime"] then
handler:bind("before_serve", function(res)
ngx.log(ngx.DEBUG, "Deleting: ", res.entity_id)
handler.storage:delete(res.entity_id)
end)
else
-- Dummy log for prime request
ngx.log(ngx.DEBUG, "entity removed during read")
end
ngx.req.set_uri_args({})
handler:run()
}
}
location "/mem_pressure_3" {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("ORIGIN")
}
}
--- request eval
["GET /mem_pressure_3_prx?prime=true", "GET /mem_pressure_3_prx"]
--- response_body eval
["ORIGIN", ""]
--- response_headers_like eval
["X-Cache: MISS from .*", "X-Cache: HIT from .*"]
--- no_error_log
[error]
--- error_log
entity removed during read
=== TEST 4: Prime some cache - stale headers
--- http_config eval: $::HttpConfig
--- config
location "/mem_pressure_4_prx" {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location "/mem_pressure_4" {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, stale-if-error=2592000, stale-while-revalidate=129600"
ngx.header["Surrogate-Control"] = [[content="ESI/1.0"]]
ngx.print("Key: ", ngx.req.get_uri_args()["key"])
}
}
--- request eval
["GET /mem_pressure_4_prx?key=main",
"GET /mem_pressure_4_prx?key=headers",
"GET /mem_pressure_4_prx?key=entities"]
--- response_body eval
["Key: main",
"Key: headers",
"Key: entities"]
--- no_error_log
[error]
=== TEST 4b: Break each key, in a different way for each, then try to serve
--- http_config eval: $::HttpConfig
--- config
location "/mem_pressure_4_prx" {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local key_chain = handler:cache_key_chain()
local evict = ngx.req.get_uri_args()["key"]
local key = key_chain[evict]
ngx.log(ngx.DEBUG, "will evict: ", key)
local res, err = redis:del(key)
if not res then
ngx.log(ngx.ERR, "could not evict: ", err)
end
redis:set(evict, "true")
ngx.log(ngx.DEBUG, tostring(res))
redis:close()
handler:run()
}
}
location "/mem_pressure_4" {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=0"
ngx.print("MISSED: ", ngx.req.get_uri_args()["key"])
}
}
--- request eval
["GET /mem_pressure_4_prx?key=main",
"GET /mem_pressure_4_prx?key=headers",
"GET /mem_pressure_4_prx?key=entities"]
--- response_body eval
["MISSED: main",
"MISSED: headers",
"MISSED: entities"]
--- no_error_log
[error]
=== TEST 5: Prime and break active entity during read - ESI
--- http_config eval: $::HttpConfig
--- config
location "/mem_pressure_5_prx" {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
if not ngx.req.get_uri_args()["prime"] then
handler:bind("before_serve", function(res)
ngx.log(ngx.DEBUG, "Deleting: ", res.entity_id)
handler.storage:delete(res.entity_id)
end)
else
-- Dummy log for prime request
require("ledge.state_machine").set_debug(true)
ngx.log(ngx.DEBUG, "entity removed during read")
end
ngx.req.set_uri_args({})
handler:run()
}
}
location "/mem_pressure_5" {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Surrogate-Control"] = 'content="ESI/1.0"'
ngx.print("ORIGIN")
ngx.print("$(QUERY_STRING)")
}
}
--- request eval
["GET /mem_pressure_5_prx?prime=true", "GET /mem_pressure_5_prx"]
--- response_body eval
["ORIGIN", ""]
--- response_headers_like eval
["X-Cache: MISS from .*", "X-Cache: HIT from .*"]
--- no_error_log
[error]
--- error_log
entity removed during read
================================================
FILE: t/02-integration/multiple_headers.t
================================================
# t/02-integration/multiple_headers.t — responses carrying repeated headers
# (multiple Cache-Control / Date values) must be cached and replayed correctly.
use Test::Nginx::Socket 'no_plan';    # test count computed automatically
use FindBin;
use lib "$FindBin::Bin/..";           # make the shared t/LedgeEnv.pm helper loadable
use LedgeEnv;
# Shared nginx http{} config, interpolated into each test via "eval: $::HttpConfig".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();                     # do not truncate long strings in failure diagnostics
no_diff();                            # show raw got/expected instead of a diff
run_tests();                          # consumes the test specs below __DATA__
__DATA__
=== TEST 1: Multiple cache-control response headers, miss
--- http_config eval: $::HttpConfig
--- config
location /multiple_cache_headers_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /multiple_cache_headers {
content_by_lua_block {
ngx.header["Cache-Control"] = { "public", "max-age=3600"}
ngx.say("TEST 1")
}
}
--- request
GET /multiple_cache_headers_prx
--- response_headers_like
X-Cache: MISS from .*
--- response_headers
Cache-Control: public, max-age=3600
--- response_body
TEST 1
=== TEST 1b: Multiple cache-control response headers, hit
--- http_config eval: $::HttpConfig
--- config
location /multiple_cache_headers_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /multiple_cache_headers {
content_by_lua_block {
ngx.header["Cache-Control"] = { "public", "max-age=3600"}
ngx.say("TEST 2")
}
}
--- request
GET /multiple_cache_headers_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers
Cache-Control: public, max-age=3600
--- response_body
TEST 1
=== TEST 2: Multiple Date response headers, miss
--- http_config eval: $::HttpConfig
--- config
location /multiple_date_headers_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
upstream_port = 12345
}):run()
}
}
--- request
GET /multiple_date_headers_prx
--- tcp_listen: 12345
--- tcp_reply
HTTP/1.1 200 OK
Date: Mon, 24 Sep 2018 00:47:20 GMT
Server: Apache/2
Date: Mon, 24 Sep 2018 01:47:20 GMT
Cache-Control: public, max-age=300
TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- response_headers_unlike
Date: Mon, 24 Sep 2018 00:47:20 GMT
Date: Mon, 24 Sep 2018 01:47:20 GMT
--- response_body
TEST 2
=== TEST 2b: Multiple Date response headers, hit
--- http_config eval: $::HttpConfig
--- config
location /multiple_date_headers_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /multiple_date_headers_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers_unlike
Date: Mon, 24 Sep 2018 00:47:20 GMT
Date: Mon, 24 Sep 2018 01:47:20 GMT
--- response_body
TEST 2
================================================
FILE: t/02-integration/on_abort.t
================================================
# Test scaffold for client-abort behaviour. lua_check_client_abort must be
# on for ledge to install its on_abort handler (TEST 1 checks the warning
# when it is off), and the keepalive upstream is used by TEST 5.
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Make the shared LedgeEnv test helper (one directory up) loadable.
use lib "$FindBin::Bin/..";
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
lua_check_client_abort on;
upstream test-upstream {
server 127.0.0.1:1984;
keepalive 16;
}
}, run_worker => 1);
# NOTE(review): run_worker => 1 presumably starts the background ledge
# worker -- confirm against LedgeEnv.pm.
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Warning when unable to set client abort handler
--- http_config eval: $::HttpConfig
--- config
location /abort_prx {
rewrite ^(.*)_prx$ $1 break;
lua_check_client_abort off;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /abort {
echo "foo";
}
--- request
GET /abort_prx
--- error_log
on_abort handler could not be set: lua_check_client_abort is off
=== TEST 2a: Client abort mid save should still save to cache (run and abort)
--- http_config eval: $::HttpConfig
--- config
location /abort_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /abort {
content_by_lua_block {
ngx.status = 200
ngx.header["Cache-Control"] = "public, max-age=3600"
ngx.say("START")
ngx.flush(true)
ngx.sleep(2)
ngx.say("FINISH")
}
}
--- request
GET /abort_prx
--- timeout: 1
--- wait: 1.5
--- abort
--- ignore_response
--- no_error_log
[error]
=== TEST 2b: Prove we have a complete cache entry
--- http_config eval: $::HttpConfig
--- config
location /abort_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /abort_prx
--- response_body
START
FINISH
--- error_code: 200
--- no_error_log
[error]
=== TEST 3a: Client abort before save aborts fetching
--- http_config eval: $::HttpConfig
--- config
location /abort_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /abort {
content_by_lua_block {
ngx.sleep(2)
ngx.status = 200
ngx.header["Cache-Control"] = "public, max-age=3600"
ngx.say("START 2")
ngx.say("FINISH 2")
}
}
--- request
GET /abort_prx
--- more_headers
Cache-Control: max-age=0
--- timeout: 1
--- wait: 1.5
--- abort
--- ignore_response
--- no_error_log
[error]
=== TEST 3b: Prove we still have the previous cache entry
--- http_config eval: $::HttpConfig
--- config
location /abort_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /abort_prx
--- response_body
START
FINISH
--- error_code: 200
--- no_error_log
[error]
=== TEST 4a: Prime immediately expiring cache item
--- http_config eval: $::HttpConfig
--- config
location /abort_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
-- immediately expire cache entries
res.header["Cache-Control"] = "max-age=0"
end)
handler:run()
}
}
location /abort {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /abort_prx
--- response_body
OK
--- error_code: 200
--- no_error_log
[error]
=== TEST 4b: Client abort before fetch with collapsed forwarding on cancels abort
--- http_config eval: $::HttpConfig
--- config
location /abort_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
enable_collapsed_forwarding = true,
})
handler:bind("before_upstream_request", function(res)
ngx.sleep(2)
end)
handler:run()
}
}
location /abort {
content_by_lua_block {
ngx.status = 200
ngx.header["Cache-Control"] = "public, max-age=3600"
ngx.say("START")
ngx.say("FINISH")
}
}
--- request
GET /abort_prx
--- timeout: 1
--- wait: 1.5
--- abort
--- ignore_response
--- no_error_log
[error]
=== TEST 4c: Prove we have the previous cache entry
--- http_config eval: $::HttpConfig
--- config
location /abort_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /abort_prx
--- response_body
START
FINISH
--- error_code: 200
--- no_error_log
[error]
=== TEST 5: No error when keepalive_requests exceeded
--- http_config eval: $::HttpConfig
--- config
location = /abort_top {
content_by_lua_block {
-- Issue two consecutive requests through /abort_ngx (proxied with
-- keepalive_requests 1) to prove exceeding the keepalive limit does not
-- produce an error on the second request.
local http = require "resty.http"
local httpc = http.new()
local res, err = httpc:request_uri(
"http://" ..
ngx.var.server_addr .. ":" .. ngx.var.server_port ..
"/abort_ngx"
)
if not res then
ngx.log(ngx.ERR, err)
end
-- Second request on the same client; the first connection was limited to
-- one request by keepalive_requests above.
local res, err = httpc:request_uri(
"http://" ..
ngx.var.server_addr .. ":" .. ngx.var.server_port ..
"/abort_ngx"
)
if not res then
ngx.log(ngx.ERR, err)
end
ngx.say("OK")
}
}
location = /abort_ngx {
rewrite ^ /abort_prx break;
proxy_pass http://test-upstream;
}
location = /abort_prx {
rewrite ^(.*)_prx$ $1 break;
keepalive_requests 1;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location = /abort {
content_by_lua_block {
ngx.status = 200
ngx.header["Cache-Control"] = "public, max-age=3600"
ngx.say("START")
ngx.say("FINISH")
}
}
--- request
GET /abort_top
--- response_body
OK
--- error_code: 200
--- no_error_log
[error]
================================================
FILE: t/02-integration/origin_mode.t
================================================
# Test scaffold for the ORIGIN_MODE_NORMAL / AVOID / BYPASS handler modes.
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Make the shared LedgeEnv test helper (one directory up) loadable.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Shared nginx http{} block, referenced by each test via "eval: $::HttpConfig".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: ORIGIN_MODE_NORMAL
--- http_config eval: $::HttpConfig
--- config
location /origin_mode_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
origin_mode = require("ledge").ORIGIN_MODE_NORMAL
}):run()
}
}
location /origin_mode {
content_by_lua_block {
ngx.header["Cache-Control"] = "public, max-age=60"
ngx.print("OK")
}
}
--- request eval
["GET /origin_mode_prx", "GET /origin_mode_prx"]
--- response_body eval
["OK", "OK"]
--- response_headers_like eval
["X-Cache: MISS from .*", "X-Cache: HIT from .*"]
--- no_error_log
[error]
=== TEST 2: ORIGIN_MODE_AVOID (no-cache request)
--- http_config eval: $::HttpConfig
--- config
location /origin_mode_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
origin_mode = require("ledge").ORIGIN_MODE_AVOID
}):run()
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /origin_mode_prx
--- response_body: OK
--- response_headers_like
X-Cache: HIT from .*
--- no_error_log
[error]
=== TEST 2a: ORIGIN_MODE_AVOID (max-age=0 request)
--- http_config eval: $::HttpConfig
--- config
location /origin_mode_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
origin_mode = require("ledge").ORIGIN_MODE_AVOID
}):run()
}
}
--- more_headers
Cache-Control: max-age=0
--- request
GET /origin_mode_prx
--- response_body: OK
--- response_headers_like
X-Cache: HIT from .*
--- no_error_log
[error]
=== TEST 2b: ORIGIN_MODE_AVOID (expired cache)
--- http_config eval: $::HttpConfig
--- config
location /origin_mode_2b_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler({
origin_mode = require("ledge").ORIGIN_MODE_AVOID
})
handler:bind("before_save", function(res)
-- immediately expire
res.header["Cache-Control"] = "max-age=0"
end)
handler:run()
}
}
location /origin_mode_2b {
content_by_lua_block {
ngx.header["Cache-Control"] = "public, max-age=60"
ngx.print("OK")
}
}
--- request eval
["GET /origin_mode_2b_prx", "GET /origin_mode_2b_prx"]
--- response_body eval
["OK", "OK"]
--- response_headers_like eval
["X-Cache: MISS from .*", "X-Cache: HIT from .*"]
--- no_error_log
[error]
=== TEST 3: ORIGIN_MODE_BYPASS when cached with 112 warning
--- http_config eval: $::HttpConfig
--- config
location /origin_mode_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
origin_mode = require("ledge").ORIGIN_MODE_BYPASS
}):run()
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /origin_mode_prx
--- response_headers_like
Warning: 112 .*
--- response_body: OK
--- no_error_log
[error]
=== TEST 4: ORIGIN_MODE_BYPASS when we have nothing
--- http_config eval: $::HttpConfig
--- config
location /origin_mode_bypass_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
origin_mode = require("ledge").ORIGIN_MODE_BYPASS
}):run()
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /origin_mode_bypass_prx
--- error_code: 503
--- no_error_log
[error]
================================================
FILE: t/02-integration/purge.t
================================================
# Purge test scaffold. The extra_lua_config heredoc injects format_json(),
# used by body_filter_by_lua_block in the tests below to render JSON purge
# responses as recursively-sorted "key: value" lines, giving stable
# response_body assertions. The qq{} heredocs are runtime strings, so no
# comments are added inside them.
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Make the shared LedgeEnv test helper (one directory up) loadable.
use lib "$FindBin::Bin/..";
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
lua_shared_dict ledge_test 1m;
}, extra_lua_config => qq{
function format_json(json, prefix)
local decode = require("cjson").decode
if type(json) == "string" then
local ok
ok, json = pcall(decode, json)
if not ok then return "" end
end
local keys = {}
for k, v in pairs(json) do
table.insert(keys, k)
end
table.sort(keys)
local fmt = "%s: %s\\n"
local out = ""
for i, k in ipairs(keys) do
local key = k
if prefix then
key = prefix.."."..k
end
if type(json[k]) == "table" then
out = out .. format_json(json[k], key)
else
out = out .. fmt:format(key, json[k])
end
end
return out
end
}, run_worker => 1);
# NOTE(review): run_worker => 1 presumably starts the background qless
# worker needed by the wildcard-purge jobs -- confirm against LedgeEnv.pm.
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Prime cache for subsequent tests
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /purge_cached {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 1")
}
}
--- request
GET /purge_cached_prx
--- no_error_log
[error]
--- response_body
TEST 1
=== TEST 2: Purge cache
--- http_config eval: $::HttpConfig
--- config
location /purge_cached {
content_by_lua_block {
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
--- request eval
["PURGE /purge_cached", "PURGE /purge_cached"]
--- no_error_log
[error]
--- response_body eval
[
'purge_mode: invalidate
result: purged
',
'purge_mode: invalidate
result: already expired
']
--- error_code eval
[200, 404]
=== TEST 3: Cache has been purged
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /purge_cached {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 3")
}
}
--- request
GET /purge_cached_prx
--- no_error_log
[error]
--- response_body
TEST 3
=== TEST 4: Purge on unknown key returns 404
--- http_config eval: $::HttpConfig
--- config
location /foobar {
content_by_lua_block {
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
--- request
PURGE /foobar
--- no_error_log
[error]
--- response_body
purge_mode: invalidate
result: nothing to purge
--- error_code: 404
=== TEST 5a: Prime another key with args
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
-- Run the handler with keep_cache_for = 0 so purged metadata is not
-- retained. The previous code bound the (nil) result of :run() to an
-- unused local misleadingly named "handler"; run it directly instead.
require("ledge").create_handler({
keep_cache_for = 0,
}):run()
}
}
location /purge_cached {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 5")
}
}
--- request
GET /purge_cached_prx?t=1
--- no_error_log
[error]
--- response_body
TEST 5
=== TEST 5b: Wildcard Purge
--- http_config eval: $::HttpConfig
--- config
location /purge_cached {
content_by_lua_block {
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
--- request
PURGE /purge_cached*
--- wait: 1
--- no_error_log
[error]
--- response_body_like
purge_mode: invalidate
qless_jobs.1.jid: [a-f0-9]{32}
qless_jobs.1.klass: ledge.jobs.purge
qless_jobs.1.options.jid: [a-f0-9]{32}
qless_jobs.1.options.priority: 5
qless_jobs.1.options.tags.1: purge
result: scheduled
--- error_code: 200
=== TEST 5c: Cache has been purged with args
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 0,
}):run()
}
}
location /purge_cached {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 5c")
}
}
--- request
GET /purge_cached_prx?t=1
--- no_error_log
[error]
--- response_body
TEST 5c
=== TEST 5d: Cache has been purged without args
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 0,
}):run()
}
}
location /purge_cached {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 5d")
}
}
--- request
GET /purge_cached_prx
--- no_error_log
[error]
--- response_body
TEST 5d
=== TEST 6a: Purge everything
--- http_config eval: $::HttpConfig
--- config
location /purge_c {
content_by_lua_block {
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
--- request
PURGE /purge_c*
--- wait: 3
--- error_code: 200
--- response_body_like
purge_mode: invalidate
qless_jobs.1.jid: [a-f0-9]{32}
qless_jobs.1.klass: ledge.jobs.purge
qless_jobs.1.options.jid: [a-f0-9]{32}
qless_jobs.1.options.priority: 5
qless_jobs.1.options.tags.1: purge
--- no_error_log
[error]
=== TEST 6: Cache keys have been collected by Redis
--- http_config eval: $::HttpConfig
--- config
location /purge_cached {
content_by_lua_block {
-- Probe Redis directly: after the wildcard purge + GC wait, the entities
-- set for this cache key should be empty.
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local key_chain = handler:cache_key_chain()
-- NOTE(review): err is ignored; on a Redis failure num_entities prints as
-- nil and the test fails on body mismatch rather than reporting the error.
local num_entities, err = redis:scard(key_chain.entities)
ngx.say("entities: ", num_entities)
}
}
--- request
GET /purge_cached
--- no_error_log
[error]
--- response_body
entities: 0
=== TEST 7a: Prime another key with args
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /purge_cached {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 5")
}
}
--- request
GET /purge_cached_prx?t=1
--- no_error_log
[error]
--- response_body
TEST 5
=== TEST 7b: Wildcard Purge, mid path (no match due to args)
--- http_config eval: $::HttpConfig
--- config
location /purge_c {
content_by_lua_block {
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
--- request
PURGE /purge_ca*ed
--- wait: 1
--- no_error_log
[error]
--- response_body_like
purge_mode: invalidate
qless_jobs.1.jid: [a-f0-9]{32}
qless_jobs.1.klass: ledge.jobs.purge
qless_jobs.1.options.jid: [a-f0-9]{32}
qless_jobs.1.options.priority: 5
qless_jobs.1.options.tags.1: purge
result: scheduled
--- error_code: 200
=== TEST 7c: Confirm purge did nothing
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /purge_cached_prx?t=1
--- no_error_log
[error]
--- response_body
TEST 5
=== TEST 8a: Prime another key - with keep_cache_for set
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_8_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_8 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 8")
}
}
--- request
GET /purge_cached_8_prx
--- no_error_log
[error]
--- response_body
TEST 8
=== TEST 8b: Wildcard Purge (200)
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_8 {
content_by_lua_block {
require("ledge").create_handler({
keyspace_scan_count = 1,
}):run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
--- request
PURGE /purge_cached_8*
--- wait: 2
--- no_error_log
[error]
--- response_body_like
purge_mode: invalidate
qless_jobs.1.jid: [a-f0-9]{32}
qless_jobs.1.klass: ledge.jobs.purge
qless_jobs.1.options.jid: [a-f0-9]{32}
qless_jobs.1.options.priority: 5
qless_jobs.1.options.tags.1: purge
result: scheduled
--- error_code: 200
=== TEST 8d: Cache has been purged
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_8_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /purge_cached_8 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 8c")
}
}
--- request
GET /purge_cached_8_prx
--- no_error_log
[error]
--- response_body
TEST 8c
--- error_code: 200
=== TEST 9a: Prime another key
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_9_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_9 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 9: ", ngx.req.get_headers()["Cookie"])
}
}
--- more_headers
Cookie: primed
--- request
GET /purge_cached_9_prx
--- no_error_log
[error]
--- response_body
TEST 9: primed
=== TEST 9b: Purge with X-Purge: revalidate
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_9_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
location /purge_cached_9 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 9 Revalidated: ", ngx.req.get_headers()["Cookie"])
}
}
--- more_headers
X-Purge: revalidate
--- request
PURGE /purge_cached_9_prx
--- no_error_log
[error]
--- response_body_like
purge_mode: revalidate
qless_jobs.1.jid: [a-f0-9]{32}
qless_jobs.1.klass: ledge.jobs.revalidate
qless_jobs.1.options.jid: [a-f0-9]{32}
qless_jobs.1.options.priority: 4
qless_jobs.1.options.tags.1: revalidate
result: purged
--- error_code: 200
=== TEST 9c: Wait for revalidation to complete
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_9_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /purge_cached_9 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 9 Revalidated: ", ngx.req.get_headers()["Cookie"])
}
}
location /waiting {
echo "OK";
}
--- request
GET /waiting
--- response_body
OK
--- no_error_log
[error]
--- wait: 5
=== TEST 9d: Confirm cache was revalidated
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_9_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /purge_cached_9_prx
--- wait: 3
--- no_error_log
[error]
--- response_body
TEST 9 Revalidated: primed
=== TEST 10a: Prime two keys
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_10_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /purge_cached_10 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 10: ", ngx.req.get_uri_args()["a"], " ", ngx.req.get_headers()["Cookie"])
}
}
--- more_headers
Cookie: primed
--- request eval
[ "GET /purge_cached_10_prx?a=1", "GET /purge_cached_10_prx?a=2" ]
--- no_error_log
[error]
--- response_body eval
[ "TEST 10: 1 primed", "TEST 10: 2 primed" ]
=== TEST 10b: Wildcard purge with X-Purge: revalidate
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_10_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
location /purge_cached_10 {
rewrite ^(.*)$ $1_origin break;
content_by_lua_block {
local a = ngx.req.get_uri_args()["a"]
ngx.log(ngx.DEBUG, "TEST 10 Revalidated: ", a, " ", ngx.req.get_headers()["Cookie"])
}
}
--- more_headers
X-Purge: revalidate
--- request
PURGE /purge_cached_10_prx?*
--- wait: 2
--- no_error_log
[error]
--- response_body_like
purge_mode: revalidate
qless_jobs.1.jid: [a-f0-9]{32}
qless_jobs.1.klass: ledge.jobs.purge
qless_jobs.1.options.jid: [a-f0-9]{32}
qless_jobs.1.options.priority: 5
qless_jobs.1.options.tags.1: purge
result: scheduled
--- error_log
TEST 10 Revalidated: 1 primed
TEST 10 Revalidated: 2 primed
--- error_code: 200
=== TEST 11a: Prime a key
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_11_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_11 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 11")
}
}
--- request
GET /purge_cached_11_prx
--- no_error_log
[error]
--- response_body: TEST 11
=== TEST 11b: Purge with X-Purge: delete
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_11_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
--- more_headers
X-Purge: delete
--- request
PURGE /purge_cached_11_prx
--- no_error_log
[error]
--- response_body
purge_mode: delete
result: deleted
--- error_code: 200
=== TEST 11c: Max-stale request fails as items are properly deleted
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_11_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_11 {
content_by_lua_block {
ngx.print("ORIGIN")
}
}
--- more_headers
Cache-Control: max-stale=1000
--- request
GET /purge_cached_11_prx
--- response_body: ORIGIN
--- no_error_log
[error]
--- error_code: 200
=== TEST 12a: Prime two keys
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_12 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 12: ", ngx.req.get_uri_args()["a"])
}
}
--- request eval
[ "GET /purge_cached_12_prx?a=1", "GET /purge_cached_12_prx?a=2" ]
--- no_error_log
[error]
--- response_body eval
[ "TEST 12: 1", "TEST 12: 2" ]
=== TEST 12b: Wildcard purge with X-Purge: delete
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
--- more_headers
X-Purge: delete
--- request
PURGE /purge_cached_12_prx?*
--- wait: 2
--- no_error_log
[error]
--- response_body_like
purge_mode: delete
qless_jobs.1.jid: [a-f0-9]{32}
qless_jobs.1.klass: ledge.jobs.purge
qless_jobs.1.options.jid: [a-f0-9]{32}
qless_jobs.1.options.priority: 5
qless_jobs.1.options.tags.1: purge
result: scheduled
--- error_code: 200
=== TEST 12c: Max-stale request fails as items are properly deleted
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_12 {
content_by_lua_block {
ngx.print("ORIGIN: ", ngx.req.get_uri_args()["a"])
}
}
--- more_headers
Cache-Control: max-stale=1000
--- request eval
[ "GET /purge_cached_12_prx?a=1", "GET /purge_cached_12_prx?a=2" ]
--- no_error_log
[error]
--- response_body eval
[ "ORIGIN: 1", "ORIGIN: 2" ]
=== TEST 13a: Prime two keys and break them
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_13_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
-- Sabotage mode: corrupt the cache entry primed as ?a=1 so that the
-- wildcard revalidating purge in TEST 13b has broken entries to handle.
local sabotage = ngx.req.get_uri_args()["sabotage"]
if sabotage then
    -- Set query string to match original request
    ngx.req.set_uri_args({a=1})
    local redis = require("ledge").create_redis_connection()
    local handler = require("ledge").create_handler()
    handler.redis = redis
    local key_chain = handler:cache_key_chain()
    if sabotage == "uri" then
        -- Break the metadata: drop the stored uri field from the main hash.
        redis:hdel(key_chain.main, "uri")
        ngx.print("Sabotaged: uri")
    elseif sabotage == "body" then
        handler.storage = require("ledge").create_storage_connection()
        -- BUG FIX: the hash field name must be the string "entity" (cf. the
        -- string "uri" in the branch above). The old code passed the
        -- undeclared global `entity` (nil), producing a malformed HGET, so
        -- the entity body was never actually deleted from storage.
        handler.storage:delete(redis:hget(key_chain.main, "entity"))
        ngx.print("Sabotaged: body storage")
    end
else
    require("ledge").create_handler():run()
end
}
}
location /purge_cached_13 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 13: ", ngx.req.get_uri_args()["a"], " ", ngx.req.get_headers()["Cookie"])
}
}
--- more_headers
Cookie: primed
--- request eval
[ "GET /purge_cached_13_prx?a=1",
"GET /purge_cached_13_prx?a=2",
"GET /purge_cached_13_prx?a=1&sabotage=body",
"GET /purge_cached_13_prx?a=1&sabotage=uri" ]
--- no_error_log
[error]
--- response_body_like eval
[ "TEST 13: 1 primed",
"TEST 13: 2 primed",
"Sabotaged: body storage",
"Sabotaged: uri" ]
=== TEST 13b: Wildcard purge broken entry with X-Purge: revalidate
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_13_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
location /purge_cached_13 {
rewrite ^(.*)$ $1_origin break;
content_by_lua_block {
local a = ngx.req.get_uri_args()["a"]
ngx.log(ngx.DEBUG, "TEST 13 Revalidated: ", a, " ", ngx.req.get_headers()["Cookie"])
}
}
--- more_headers
X-Purge: revalidate
--- request
PURGE /purge_cached_13_prx?*
--- wait: 2
--- error_log
TEST 13 Revalidated: 2 primed
--- response_body_like
purge_mode: revalidate
qless_jobs.1.jid: [a-f0-9]{32}
qless_jobs.1.klass: ledge.jobs.purge
qless_jobs.1.options.jid: [a-f0-9]{32}
qless_jobs.1.options.priority: 5
qless_jobs.1.options.tags.1: purge
result: scheduled
--- error_code: 200
=== TEST 14: Purge API runs
--- http_config eval: $::HttpConfig
--- config
location /purge_api {
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
location /purge_cached_14_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_14 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 14: ", ngx.req.get_uri_args()["a"])
}
}
--- request eval
[
"GET /purge_cached_14_prx?a=1", "GET /purge_cached_14_prx?a=2",
qq(PURGE /purge_api
{"uris": ["http://localhost:$LedgeEnv::nginx_port/purge_cached_14_prx?a=1", "http://localhost:$LedgeEnv::nginx_port/purge_cached_14_prx?a=2"]}),
"GET /purge_cached_14_prx?a=1", "GET /purge_cached_14_prx?a=2",
]
--- more_headers eval
[
"","",
"Content-Type: Application/JSON",
"","",
]
--- response_body eval
[
"TEST 14: 1", "TEST 14: 2",
qq(purge_mode: invalidate
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_14_prx?a=1.result: purged
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_14_prx?a=2.result: purged
),
"TEST 14: 1", "TEST 14: 2",
]
--- response_headers_like eval
[
"X-Cache: MISS from .+", "X-Cache: MISS from .+",
"Content-Type: application/json",
"X-Cache: MISS from .+", "X-Cache: MISS from .+",
]
--- no_error_log
[error]
=== TEST 15: Purge API wildcard query string
--- http_config eval: $::HttpConfig
--- config
location /purge_api {
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
location /purge_cached_15_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_15 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 15: ", ngx.req.get_uri_args()["a"])
}
}
--- request eval
[
"GET /purge_cached_15_prx?a=1", "GET /purge_cached_15_prx?a=2",
qq(PURGE /purge_api
{"uris": ["http://localhost:$LedgeEnv::nginx_port/purge_cached_15_prx?a*"]}),
]
--- more_headers eval
[
"","",
"Content-Type: Application/JSON",
]
--- response_body_like eval
[
"TEST 15: 1", "TEST 15: 2",
qq(purge_mode: invalidate
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_15_prx\\?a\\*.qless_jobs.1.jid: [a-f0-9]{32}
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_15_prx\\?a\\*.qless_jobs.1.klass: ledge.jobs.purge
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_15_prx\\?a\\*.qless_jobs.1.options.jid: [a-f0-9]{32}
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_15_prx\\?a\\*.qless_jobs.1.options.priority: 5
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_15_prx\\?a\\*.qless_jobs.1.options.tags.1: purge
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_15_prx\\?a\\*.result: scheduled
),
]
--- response_headers_like eval
[
"X-Cache: MISS from .+", "X-Cache: MISS from .+",
"Content-Type: application/json",
]
--- wait: 2
--- no_error_log
[error]
=== TEST 15b: Purge API wildcard query string
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_15_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_15 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 15b: ", ngx.req.get_uri_args()["a"])
}
}
--- request eval
["GET /purge_cached_15_prx?a=1", "GET /purge_cached_15_prx?a=2"]
--- response_body_like eval
["TEST 15b: 1", "TEST 15b: 2"]
--- response_headers_like eval
["X-Cache: MISS from .+", "X-Cache: MISS from .+"]
--- no_error_log
[error]
=== TEST 16: Purge API wildcards
--- http_config eval: $::HttpConfig
--- config
location /purge_api {
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
location /purge_cached_16_prx {
rewrite ^(.*)_prx(.*)? $1$2 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_16 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 16: ", ngx.req.get_uri_args()["a"])
}
}
--- request eval
[
"GET /purge_cached_16_prx?a=1", "GET /purge_cached_16_prx?a=2",
qq(PURGE /purge_api
{"uris": ["http://localhost:$LedgeEnv::nginx_port/purge_cached_16_prx*"]}),
]
--- more_headers eval
[
"","",
"Content-Type: Application/JSON",
]
--- response_body_like eval
[
"TEST 16: 1", "TEST 16: 2",
qq(purge_mode: invalidate
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_16_prx\\*.qless_jobs.1.jid: [a-f0-9]{32}
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_16_prx\\*.qless_jobs.1.klass: ledge.jobs.purge
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_16_prx\\*.qless_jobs.1.options.jid: [a-f0-9]{32}
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_16_prx\\*.qless_jobs.1.options.priority: 5
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_16_prx\\*.qless_jobs.1.options.tags.1: purge
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_16_prx\\*.result: scheduled
),
]
--- response_headers_like eval
[
"X-Cache: MISS from .+", "X-Cache: MISS from .+",
"Content-Type: application/json",
]
--- wait: 2
--- no_error_log
[error]
=== TEST 16b: Purge API wildcard check
--- http_config eval: $::HttpConfig
--- config
location /purge_cached_16_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_16 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 16b: ", ngx.req.get_uri_args()["a"])
}
}
--- request eval
["GET /purge_cached_16_prx?a=1", "GET /purge_cached_16_prx?a=2"]
--- response_body_like eval
["TEST 16b: 1", "TEST 16b: 2"]
--- response_headers_like eval
["X-Cache: MISS from .+", "X-Cache: MISS from .+"]
--- no_error_log
[error]
=== TEST 17: Purge API - bad request
--- http_config eval: $::HttpConfig
--- config
location /purge_api {
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
--- request eval
[
'PURGE /purge_api
{"uris": ["foobar"]}',
'PURGE /purge_api
this is not valid json',
'PURGE /purge_api
{"foo": ["bar"]}',
'PURGE /purge_api
{"uris": []}',
'PURGE /purge_api
{"uris": "not an array"}',
'PURGE /purge_api
{"uris": ["http://www.example.com/"], "purge_mode": "foobar"}'
]
--- more_headers
Content-Type: Application/JSON
--- error_code eval
[200,400,400,400,400,400]
--- response_body eval
[
"purge_mode: invalidate
result.foobar.error: bad uri: foobar
",
"error: Could not parse request body: Expected value but found invalid token at character 1
",
"error: No URIs provided
",
"error: No URIs provided
",
"error: Field 'uris' must be an array
",
"error: Invalid purge_mode
",
]
--- no_error_log
[error]
=== TEST 17b: Purge API passes through purge_mode
--- http_config eval: $::HttpConfig
--- config
location /purge_api {
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
location /purge_cached_17_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_17 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.print("TEST 17: ", ngx.req.get_uri_args()["a"])
}
}
--- request eval
[
"GET /purge_cached_17_prx?a=1",
qq(PURGE /purge_api
{"purge_mode": "revalidate", "uris": ["http://localhost:$LedgeEnv::nginx_port/purge_cached_17_prx?a=1"]}),
]
--- more_headers eval
[
"", "Content-Type: Application/JSON",
]
--- response_body_like eval
[
"TEST 17: 1",
qq(purge_mode: revalidate
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_17_prx\\?a=1.qless_jobs.1.jid: [a-f0-9]{32}
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_17_prx\\?a=1.qless_jobs.1.klass: ledge.jobs.revalidate
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_17_prx\\?a=1.qless_jobs.1.options.jid: [a-f0-9]{32}
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_17_prx\\?a=1.qless_jobs.1.options.priority: 4
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_17_prx\\?a=1.qless_jobs.1.options.tags.1: revalidate
result.http://localhost:$LedgeEnv::nginx_port/purge_cached_17_prx\\?a=1.result: purged
),
]
--- wait: 1
=== TEST 18: Purge clears all representations
--- http_config eval: $::HttpConfig
--- config
location /purge {
rewrite ^ /purge_cached_18 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
location /purge_cached_18_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_18 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Vary"] = "X-Test"
ngx.print("TEST 18: ", ngx.req.get_headers()["X-Test"])
}
}
--- request eval
[
"GET /purge_cached_18_prx", "GET /purge_cached_18_prx",
"PURGE /purge",
"GET /purge_cached_18_prx", "GET /purge_cached_18_prx",
]
--- more_headers eval
[
"X-Test: abc", "X-Test: xyz",
"",
"X-Test: abc", "X-Test: xyz",
]
--- response_body eval
[
"TEST 18: abc", "TEST 18: xyz",
"purge_mode: invalidate
result: purged
",
"TEST 18: abc", "TEST 18: xyz",
]
--- response_headers_like eval
[
"X-Cache: MISS from .+", "X-Cache: MISS from .+",
"",
"X-Cache: MISS from .+", "X-Cache: MISS from .+",
]
--- no_error_log
[error]
=== TEST 19: Purge response with no body
--- http_config eval: $::HttpConfig
--- config
location /purge {
rewrite ^ /purge_cached_19 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
body_filter_by_lua_block {
ngx.arg[1] = format_json(ngx.arg[1])
ngx.arg[2] = true
}
}
location /purge_cached_19_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_cached_19 {
content_by_lua_block {
local incr = ngx.shared.ledge_test:incr("test19", 1, 0)
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["X-Incr"] = incr
}
}
--- request eval
[
"GET /purge_cached_19_prx", "GET /purge_cached_19_prx",
"PURGE /purge",
"GET /purge_cached_19_prx"
]
--- error_code eval
[200, 200, 200, 200]
--- response_headers_like eval
[
"X-Cache: MISS from .+
X-Incr: 1",
"X-Cache: HIT from .+
X-Incr: 1",
"",
"X-Cache: MISS from .+
X-Incr: 2"
]
--- no_error_log
[error]
================================================
FILE: t/02-integration/range.t
================================================
# t/02-integration/range.t -- integration tests for HTTP Range request
# handling (single/multi-part ranges, 206/416 responses, chunk boundaries).
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Add the parent test dir (t/) to @INC so the shared LedgeEnv helper loads.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Shared nginx http{} config, referenced from the data section as
# "--- http_config eval: $::HttpConfig".  run_worker => 1: presumably starts
# the background (Qless) worker that the background-fetch tests below rely
# on -- TODO confirm against LedgeEnv.
our $HttpConfig = LedgeEnv::http_config(run_worker => 1);
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Prime cache for subsequent tests.
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /range {
content_by_lua_block {
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.print("0123456789");
}
}
--- request
GET /range_prx
--- response_body: 0123456789
--- error_code: 200
--- no_error_log
[error]
=== TEST 2: Cache HIT, get the first byte only
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=0-1
--- request
GET /range_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers
Content-Range: bytes 0-1/10
Cache-Control: public, max-age=3600
--- response_body: 01
--- error_code: 206
--- no_error_log
[error]
=== TEST 3: Cache HIT, get middle bytes
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=3-5
--- request
GET /range_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers
Content-Range: bytes 3-5/10
Cache-Control: public, max-age=3600
--- response_body: 345
--- error_code: 206
--- no_error_log
[error]
=== TEST 4: Cache HIT, get middle to end bytes
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=6-
--- request
GET /range_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers
Content-Range: bytes 6-9/10
Cache-Control: public, max-age=3600
--- response_body: 6789
--- error_code: 206
--- no_error_log
[error]
=== TEST 5: Cache HIT, get offset from end bytes.
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=-4
--- request
GET /range_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers
Content-Range: bytes 6-9/10
Cache-Control: public, max-age=3600
--- response_body: 6789
--- error_code: 206
--- no_error_log
[error]
=== TEST 5b: Cache HIT, get byte to end
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=2-
--- request
GET /range_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers
Content-Range: bytes 2-9/10
Cache-Control: public, max-age=3600
--- response_body: 23456789
--- error_code: 206
--- no_error_log
[error]
=== TEST 6: Cache HIT, get beginning bytes spanning buffer size
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
buffer_size = 2,
}):run()
}
}
--- more_headers
Range: bytes=0-5
--- request
GET /range_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers
Content-Range: bytes 0-5/10
Cache-Control: public, max-age=3600
--- response_body: 012345
--- error_code: 206
--- no_error_log
[error]
=== TEST 7: Cache HIT, get middle bytes spanning buffer size
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
buffer_size = 4,
}):run()
}
}
--- more_headers
Range: bytes=3-7
--- request
GET /range_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers
Content-Range: bytes 3-7/10
Cache-Control: public, max-age=3600
--- response_body: 34567
--- error_code: 206
--- no_error_log
[error]
=== TEST 8: Ask for range outside content length, last byte should be reduced to length.
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=3-12
--- request
GET /range_prx
--- response_headers_like
X-Cache: HIT from .*
--- response_headers
Content-Range: bytes 3-9/10
Cache-Control: public, max-age=3600
--- response_body: 3456789
--- error_code: 206
--- no_error_log
[error]
=== TEST 9: Range end is smaller than range start (unsatisfiable)
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=12-3
--- request
GET /range_prx
--- response_headers
Content-Range: bytes */10
--- response_body:
--- error_code: 416
--- no_error_log
[error]
=== TEST 9b: Range end offset is larger than range (unsatisfiable)
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=-12
--- request
GET /range_prx
--- response_headers
Content-Range: bytes */10
--- response_body:
--- error_code: 416
--- no_error_log
[error]
=== TEST 10: Range is incomprehensible
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=asdfa
--- request
GET /range_prx
--- response_headers
Content-Range: bytes */10
--- response_body:
--- error_code: 416
--- no_error_log
[error]
=== TEST 10b: Range is incomprehensible
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: isdfsdbytes=asdfa
--- request
GET /range_prx
--- response_headers
Content-Range: bytes */10
--- response_body:
--- error_code: 416
--- no_error_log
[error]
=== TEST 11: Multi byte ranges
--- http_config eval: $::HttpConfig
--- config
location /range_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=0-3,5-8
--- request
GET /range_prx
--- no_error_log
[error]
--- response_body_like chop
--[0-9a-z]+
Content-Type: text/plain
Content-Range: bytes 0-3/10
0123
--[0-9a-z]+
Content-Type: text/plain
Content-Range: bytes 5-8/10
5678
--[0-9a-z]+--
--- error_code: 206
--- no_error_log
[error]
=== TEST 12a: Prime cache with buffers smaller than range.
--- http_config eval: $::HttpConfig
--- config
location /range_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
buffer_size = 3,
}):run()
}
}
location /range_12 {
content_by_lua_block {
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.status = 200
ngx.print("0123456789");
}
}
--- request
GET /range_12_prx
--- response_body: 0123456789
--- no_error_log
[error]
=== TEST 12b: Multi byte ranges across chunk boundaries
--- http_config eval: $::HttpConfig
--- config
location /range_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=0-3,5-8
--- request
GET /range_12_prx
--- no_error_log
[error]
--- response_body_like chop
--[0-9a-z]+
Content-Type: text/plain
Content-Range: bytes 0-3/10
0123
--[0-9a-z]+
Content-Type: text/plain
Content-Range: bytes 5-8/10
5678
--[0-9a-z]+--
--- error_code: 206
--- no_error_log
[error]
=== TEST 12c: Single range which spans chunk boundaries
--- http_config eval: $::HttpConfig
--- config
location /range_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=4-7
--- request
GET /range_12_prx
--- response_headers
Content-Range: bytes 4-7/10
--- response_body: 4567
--- error_code: 206
--- no_error_log
[error]
=== TEST 12d: Multi byte reversed ranges. Return in sane order.
--- http_config eval: $::HttpConfig
--- config
location /range_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=5-8,0-3
--- request
GET /range_12_prx
--- no_error_log
[error]
--- response_body_like chop
--[0-9a-z]+
Content-Type: text/plain
Content-Range: bytes 0-3/10
0123
--[0-9a-z]+
Content-Type: text/plain
Content-Range: bytes 5-8/10
5678
--[0-9a-z]+--
--- error_code: 206
--- no_error_log
[error]
=== TEST 12e: Multi byte reversed overlapping ranges. Return in sane order and coalesced.
--- http_config eval: $::HttpConfig
--- config
location /range_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=5-8,0-3,4-6
--- request
GET /range_12_prx
--- no_error_log
[error]
--- response_body_like chop
--[0-9a-z]+
Content-Type: text/plain
Content-Range: bytes 0-3/10
0123
--[0-9a-z]+
Content-Type: text/plain
Content-Range: bytes 4-8/10
45678
--[0-9a-z]+--
--- error_code: 206
--- no_error_log
[error]
=== TEST 12f: Multi byte reversed overlapping ranges. Return in sane order and coalesced to single range.
--- http_config eval: $::HttpConfig
--- config
location /range_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=5-8,0-3,3-6
--- request
GET /range_12_prx
--- response_headers
Content-Range: bytes 0-8/10
--- response_body: 012345678
--- error_code: 206
--- no_error_log
[error]
=== TEST 13a: Prime with ESI content, thus of indeterminate length.
--- http_config eval: $::HttpConfig
--- config
location /range_13_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
esi_enabled = true,
}):run()
}
}
location /range_13 {
default_type text/html;
content_by_lua_block {
ngx.header["Cache-Control"] = "public, max-age=3600"
ngx.header["Surrogate-Control"] = 'content="ESI/1.0"'
ngx.status = 200
ngx.print("01");
ngx.print("$(QUERY_STRING{a})")
ngx.print("56789");
}
}
--- request
GET /range_13_prx?a=234
--- response_body: 0123456789
--- no_error_log
[error]
=== TEST 13b: Normal range over indeterminate length (must 200 with full reply)
--- http_config eval: $::HttpConfig
--- config
location /range_13_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler({
esi_enabled = true,
}):run()
}
}
--- more_headers
Range: bytes=0-5
--- request
GET /range_13_prx?a=234
--- response_body: 0123456789
--- error_code: 200
--- no_error_log
[error]
=== TEST 13c: Offset to end over indeterminate length (must 200 with full reply)
--- http_config eval: $::HttpConfig
--- config
location /range_13_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
esi_enabled = true,
}):run()
}
}
--- more_headers
Range: bytes=-5
--- request
GET /range_13_prx?a=234
--- response_body: 0123456789
--- error_code: 200
--- no_error_log
[error]
=== TEST 13d: Range to end over indeterminate length (must 200 with full reply)
--- http_config eval: $::HttpConfig
--- config
location /range_13_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
esi_enabled = true,
}):run()
}
}
--- more_headers
Range: bytes=5-
--- request
GET /range_13_prx?a=234
--- response_body: 0123456789
--- error_code: 200
--- no_error_log
[error]
=== TEST 14: Confirm we do not cache 206 responses from upstream
--- http_config eval: $::HttpConfig
--- config
location /range_14_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /range_14 {
content_by_lua_block {
ngx.status = 206
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.header["Content-Range"] = "bytes 0-5/10"
ngx.print("012345");
}
}
--- more_headers
Range: bytes=0-5
--- request eval
["GET /range_14_prx", "GET /range_14_prx"]
--- raw_response_headers_unlike eval
["X-Cache", "X-Cache"]
--- response_body eval
["012345", "012345"]
--- wait: 1
--- error_code eval
[206, 206]
--- no_error_log
[error]
=== TEST 15: Confirm we do not cache 416 responses from upstream
--- http_config eval: $::HttpConfig
--- config
location /range_15_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /range_15 {
content_by_lua_block {
ngx.status = 416
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.header["Content-Range"] = "bytes */10"
}
}
--- more_headers
Range: bytes=11-
--- request eval
["GET /range_15_prx", "GET /range_15_prx"]
--- raw_response_headers_unlike eval
["X-Cache", "X-Cache"]
--- response_body eval
["", ""]
--- error_code eval
[416, 416]
--- no_error_log
[error]
=== TEST 16: Confirm we do not attempt range processing on non-200 responses
--- http_config eval: $::HttpConfig
--- config
location /range_16_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /range_16 {
content_by_lua_block {
ngx.status = 404
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.print("0123456789")
}
}
--- more_headers
Range: bytes=0-5
--- request eval
["GET /range_16_prx", "GET /range_16_prx"]
--- response_headers_like eval
["X-Cache: MISS from .*", "X-Cache: HIT from .*"]
--- response_body eval
["0123456789", "0123456789"]
--- error_code eval
[404, 404]
--- no_error_log
[error]
=== TEST 17: Cache miss range request
Upstream returns range, triggers background fetch
--- http_config eval: $::HttpConfig
--- config
location /range_17_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
}
location /range_17 {
content_by_lua_block {
if ngx.req.get_headers()["Range"] then
ngx.status = 206
ngx.header["Cache-Control"] = "public, max-age=3600"
ngx.header["Content-Range"] = "bytes 1-5/10"
ngx.print("012345")
else
ngx.status = 200
ngx.header["Cache-Control"] = "public, max-age=3600"
ngx.print("0123456789")
end
}
}
--- more_headers
Range: bytes=0-5
--- request
GET /range_17_prx
--- response_body: 012345
--- raw_response_headers_unlike
X-Cache: .*
--- wait: 2
--- error_code: 206
--- no_error_log
[error]
=== TEST 17b: Confirm revalidation, with a different range
--- http_config eval: $::HttpConfig
--- config
location /range_17_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
Range: bytes=6-
--- request
GET /range_17_prx
--- response_body: 6789
--- response_headers_like
X-Cache: HIT from .*
--- error_code: 206
--- no_error_log
[error]
=== TEST 18: Cache miss range request, upstream returns range, but size is too big for background fetch
--- http_config eval: $::HttpConfig
--- config
location /range_18_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
-- Set max memory on first hit, but not on background fetch.
-- We want to test that the job is never started
if ngx.req.get_headers()["Range"] then
local handler = require("ledge").create_handler()
handler.config.storage_driver_config.max_size = 9 -- < 10
handler:run()
else
require("ledge").create_handler():run()
end
}
}
location /range_18 {
content_by_lua_block {
if ngx.req.get_headers()["Range"] then
ngx.status = 206
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.header["Content-Range"] = "bytes 0-5/10"
ngx.print("012345");
else
ngx.status = 200
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.print("0123456789");
end
}
}
--- more_headers
Range: bytes=0-5
--- request
GET /range_18_prx
--- response_body: 012345
--- raw_response_headers_unlike
X-Cache: .*
--- wait: 1
--- error_code: 206
--- no_error_log
[error]
=== TEST 18b: Confirm revalidation has not happened
--- http_config eval: $::HttpConfig
--- config
location /range_18_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /range_18 {
content_by_lua_block {
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.print("MISS")
}
}
--- more_headers
Range: bytes=6-
--- request
GET /range_18_prx
--- response_body: MISS
--- response_headers_like
X-Cache: MISS from .*
--- error_code: 200
--- no_error_log
[error]
=== TEST 19: Cache miss range request
Upstream returns range, but size is unknown
--- http_config eval: $::HttpConfig
--- config
location /range_19_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
-- Set max memory on first hit, but not on background fetch.
-- We want to test that the job is never started
if ngx.req.get_headers()["Range"] then
local handler = require("ledge").create_handler()
handler.config.storage_driver_config.max_size = 9 -- < 10
handler:run()
else
require("ledge").create_handler():run()
end
}
}
location /range_19 {
content_by_lua_block {
if ngx.req.get_headers()["Range"] then
ngx.status = 206
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.header["Content-Range"] = "bytes 0-5/*"
ngx.print("012345");
else
ngx.status = 200
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.print("0123456789");
end
}
}
--- more_headers
Range: bytes=0-5
--- request
GET /range_19_prx
--- response_body: 012345
--- raw_response_headers_unlike
X-Cache: .*
--- wait: 1
--- error_code: 206
--- no_error_log
[error]
=== TEST 19b: Confirm revalidation has not happened
--- http_config eval: $::HttpConfig
--- config
location /range_19_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /range_19 {
content_by_lua_block {
ngx.header["Cache-Control"] = "public, max-age=3600";
ngx.print("MISS")
}
}
--- more_headers
Range: bytes=6-
--- request
GET /range_19_prx
--- response_body: MISS
--- response_headers_like
X-Cache: MISS from .*
--- error_code: 200
--- no_error_log
[error]
================================================
FILE: t/02-integration/req_body.t
================================================
# t/02-integration/req_body.t -- verifies the request body is passed
# through to the upstream unchanged.
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Add the parent test dir (t/) to @INC so the shared LedgeEnv helper loads.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Shared nginx http{} config, referenced from the data section as
# "--- http_config eval: $::HttpConfig".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Should pass through request body
--- http_config eval: $::HttpConfig
--- config
location /cached_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /cached {
content_by_lua_block {
ngx.req.read_body()
ngx.say({ngx.req.get_body_data()})
}
}
--- request
POST /cached_prx
requestbody
--- response_body
requestbody
================================================
FILE: t/02-integration/req_method.t
================================================
# t/02-integration/req_method.t -- tests cache behaviour per request
# method (GET/HEAD/POST and unrecognised methods).
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Add the parent test dir (t/) to @INC so the shared LedgeEnv helper loads.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Shared nginx http{} config, referenced from the data section as
# "--- http_config eval: $::HttpConfig".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: GET
--- http_config eval: $::HttpConfig
--- config
location /req_method_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /req_method_1 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "req_method_1"
ngx.say(ngx.req.get_method())
}
}
--- request
GET /req_method_1_prx
--- response_body
GET
--- no_error_log
[error]
=== TEST 2: HEAD gets GET request
--- http_config eval: $::HttpConfig
--- config
location /req_method_1 {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /req_method_1
--- response_headers
Etag: req_method_1
--- no_error_log
[error]
=== TEST 3: HEAD revalidate
--- http_config eval: $::HttpConfig
--- config
location /req_method_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /req_method_1 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "req_method_1"
}
}
--- more_headers
Cache-Control: max-age=0
--- request
HEAD /req_method_1_prx
--- response_headers
Etag: req_method_1
--- no_error_log
[error]
=== TEST 4: GET still has body
--- http_config eval: $::HttpConfig
--- config
location /req_method_1 {
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /req_method_1
--- response_headers
Etag: req_method_1
--- response_body
GET
--- no_error_log
[error]
=== TEST 5: POST does not get cached copy
--- http_config eval: $::HttpConfig
--- config
location /req_method_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /req_method_1 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "req_method_posted"
ngx.say(ngx.req.get_method())
}
}
--- request
POST /req_method_1_prx
--- response_headers
Etag: req_method_posted
--- response_body
POST
--- no_error_log
[error]
=== TEST 6: GET uses cached POST response.
--- http_config eval: $::HttpConfig
--- config
location /req_method_1 {
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /req_method_1
--- response_headers
Etag: req_method_posted
--- response_body
POST
--- no_error_log
[error]
=== TEST 7: 501 on unrecognised method
--- http_config eval: $::HttpConfig
--- config
location /req_method_1 {
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
FOOBAR /req_method_1
--- error_code: 501
--- no_error_log
[error]
================================================
FILE: t/02-integration/request_leak.t
================================================
# t/02-integration/request_leak.t -- regression test: an aborted request's
# body must not leak into a subsequent request.
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Add the parent test dir (t/) to @INC so the shared LedgeEnv helper loads.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Shared nginx http{} config with extra directives: "if_modified_since off"
# and "lua_check_client_abort on" (presumably so the handler can observe
# the client abort triggered by TEST 1 -- see ngx_lua docs).
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
if_modified_since off;
lua_check_client_abort on;
});
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Aborted request does not leak body into subsequent request
--- http_config eval
"$::HttpConfig"
--- config
location = /trigger {
content_by_lua_block {
-- Send broken request and close socket
local broken_sock = ngx.socket.tcp()
broken_sock:settimeout(5000)
local ok, err = broken_sock:connect("127.0.0.1", ngx.var.server_port)
broken_sock:send("POST /target?id=1 HTTP/1.1\r\nHost: 127.0.0.1\r\nContent-Length: 16\r\n\r\n123\r\n")
broken_sock:close()
-- Send valid request and leave socket open
local valid_sock = ngx.socket.tcp()
valid_sock:settimeout(1000)
local ok, err = valid_sock:connect("127.0.0.1", ngx.var.server_port)
valid_sock:send("GET /target?id=2 HTTP/1.1\r\nHost: 127.0.0.1\r\n\r\n")
-- Wait and read until end of headers
local header_reader = valid_sock:receiveuntil("\r\n\r\n")
local headers
repeat
headers = header_reader()
until headers
ngx.log(ngx.INFO, "HEADERS: ", headers)
-- We're expecting chunked encoding
if not headers:find("chunked") then
ngx.log(ngx.ERR, "Expected chunked response but no header indicating such, failed!")
ngx.exit(400)
end
-- Read chunk length as base16
local chunk_len = tonumber(valid_sock:receive('*l'), 16)
-- Read full chunk off wire
local body, err, partial
repeat
body, err, partial = valid_sock:receive(chunk_len)
until body or err
valid_sock:close()
if err then
ngx.exit(400)
end
ngx.print(body)
}
}
location /target {
rewrite /target$ /origin break;
content_by_lua_block {
ngx.req.set_header("Host", "127.0.0.2")
require("ledge").create_handler():run()
}
}
location = /origin {
content_by_lua_block {
ngx.req.read_body()
local args, err = ngx.req.get_uri_args()
local data = ngx.req.get_body_data() or ''
local method = ngx.req.get_method() or ''
ngx.print("ORIGIN-", args['id'], "-", method, ":", data)
ngx.exit(200)
}
}
--- request
GET /trigger
--- response_body: ORIGIN-2-GET:
================================================
FILE: t/02-integration/response.t
================================================
# t/02-integration/response.t -- tests response-object behaviour
# (header case-insensitivity, TTL derivation from cache headers).
use Test::Nginx::Socket 'no_plan';
use FindBin;
# Add the parent test dir (t/) to @INC so the shared LedgeEnv helper loads.
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Shared nginx http{} config, referenced from the data section as
# "--- http_config eval: $::HttpConfig".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Header case insensitivity
--- http_config eval: $::HttpConfig
--- config
location /response_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("after_upstream_request", function(res)
if res.header["X-tesT"] == "1" then
res.header["x-TESt"] = "2"
end
if res.header["X-TEST"] == "2" then
res.header["x-test"] = "3"
end
end)
handler:run()
}
}
location /response_1 {
content_by_lua_block {
ngx.header["X-Test"] = "1"
ngx.say("OK")
}
}
--- request
GET /response_1_prx
--- response_headers
X-Test: 3
--- no_error_log
[error]
=== TEST 2: TTL from s-maxage (overrides max-age / Expires)
--- http_config eval: $::HttpConfig
--- config
location /response_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_serve", function(res)
res.header["X-TTL"] = res:ttl()
end)
handler:run()
}
}
location /response_2 {
content_by_lua_block {
ngx.header["Expires"] = ngx.http_time(ngx.time() + 300)
ngx.header["Cache-Control"] = "max-age=600, s-maxage=1200"
ngx.say("OK")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /response_2_prx
--- response_headers
X-TTL: 1200
--- no_error_log
[error]
=== TEST 3: TTL from max-age (overrides Expires)
--- http_config eval: $::HttpConfig
--- config
location /response_3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_serve", function(res)
res.header["X-TTL"] = res:ttl()
end)
handler:run()
}
}
location /response_3 {
content_by_lua_block {
ngx.header["Expires"] = ngx.http_time(ngx.time() + 300)
ngx.header["Cache-Control"] = "max-age=600"
ngx.say("OK")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /response_3_prx
--- response_headers
X-TTL: 600
--- no_error_log
[error]
=== TEST 4: TTL from Expires
--- http_config eval: $::HttpConfig
--- config
location /response_4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_serve", function(res)
res.header["X-TTL"] = res:ttl()
end)
handler:run()
}
}
location /response_4 {
content_by_lua_block {
ngx.header["Expires"] = ngx.http_time(ngx.time() + 300)
ngx.say("OK")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /response_4_prx
--- response_headers
X-TTL: 300
--- no_error_log
[error]
=== TEST 4b: TTL from Expires, when there are multiple Expires headers
--- http_config eval: $::HttpConfig
--- config
location /response_4b_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_serve", function(res)
res.header["X-TTL"] = res:ttl()
end)
handler:run()
}
}
location /response_4b {
set $ttl_1 0;
set $ttl_2 0;
access_by_lua_block {
ngx.var.ttl_1 = ngx.http_time(ngx.time() + 300)
ngx.var.ttl_2 = ngx.http_time(ngx.time() + 100)
}
add_header Expires $ttl_1;
add_header Expires $ttl_2;
echo "OK";
}
--- more_headers
Cache-Control: no-cache
--- request
GET /response_4b_prx
--- response_headers
X-TTL: 100
--- no_error_log
[error]
================================================
FILE: t/02-integration/ssl.t
================================================
# SSL integration tests (t/02-integration/ssl.t).
# Standard Test::Nginx preamble, plus defaults for the html/socket directories
# used by the tests' unix-domain SSL listener and "--- user_files" sections.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
$ENV{TEST_NGINX_HTML_DIR} ||= html_dir();
$ENV{TEST_NGINX_SOCKET_DIR} ||= $ENV{TEST_NGINX_HTML_DIR};
# Slurp an entire file and return its contents as a single string.
# Used below to load the test CA certificate and example.com cert/key pair.
# Dies with a diagnostic (including $!) if the file cannot be opened.
sub read_file {
    my ($infile) = @_;
    # Three-arg open with an explicit '<' mode: the original 2-arg form would
    # honour mode characters embedded in $infile (e.g. a leading '>' or '|').
    open my $in, '<', $infile
        or die "cannot open $infile for reading: $!";
    # Localise $/ (the input record separator) to undef to slurp in one read.
    my $content = do { local $/; <$in> };
    close $in;
    return $content;
}
# Certificate material injected into each test via "--- user_files eval".
our $RootCACert = read_file("t/cert/rootCA.pem");
our $ExampleCert = read_file("t/cert/example.com.crt");
our $ExampleKey = read_file("t/cert/example.com.key");
# Shared nginx http{} config: trust the test root CA for upstream verification,
# serve the example.com cert/key on the SSL listener, and install a Lua
# do_ssl() helper that connects and handshakes over the unix-domain SSL socket.
# Fix: "session, err" in the handshake call previously leaked "session" as a
# Lua global; it is now declared local.
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
lua_ssl_trusted_certificate "../html/rootca.pem";
ssl_certificate "../html/example.com.crt";
ssl_certificate_key "../html/example.com.key";
}, extra_lua_config => qq{
-- SSL helper function
function do_ssl(ssl_opts, params)
local ssl_opts = ssl_opts or {}
if not ssl_opts.verify then
ssl_opts.verify = false
end
if not ssl_opts.send_status_req then
ssl_opts.send_status_req = false
end
local httpc_ssl = require("resty.http").new()
local ok, err =
httpc_ssl:connect("unix:$ENV{TEST_NGINX_SOCKET_DIR}/nginx-ssl.sock")
if not ok then
ngx.say("Unable to connect to sock, ", err)
return ngx.exit(ngx.status)
end
local session, err = httpc_ssl:ssl_handshake(
nil,
ssl_opts.sni_name,
ssl_opts.verify,
ssl_opts.send_status_req
)
if err then
ngx.say("Unable to sslhandshake, ", err)
return ngx.exit(ngx.status)
end
httpc_ssl:set_timeout(2000)
if params then
return httpc_ssl:request(params)
else
return httpc_ssl:proxy_request()
end
end
require("ledge").set_handler_defaults({
upstream_host = "unix:$ENV{TEST_NGINX_SOCKET_DIR}/nginx-ssl.sock",
upstream_use_ssl = true,
upstream_ssl_server_name = "example.com",
upstream_ssl_verify = true,
})
}, run_worker => 1);
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: SSL works
--- http_config eval: $::HttpConfig
--- config
listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl;
location /upstream_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /upstream {
content_by_lua_block {
ngx.say("OK ", ngx.var.scheme)
}
}
--- user_files eval
">>> rootca.pem
$::RootCACert
>>> example.com.key
$::ExampleKey
>>> example.com.crt
$::ExampleCert"
--- request
GET /upstream_prx
--- error_code: 200
--- no_error_log
[error]
--- response_body
OK https
=== TEST 2: Bad SSL name errors
--- http_config eval: $::HttpConfig
--- config
listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl;
location /upstream_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
upstream_ssl_server_name = "foobar",
}):run()
}
}
location /upstream {
content_by_lua_block {
ngx.say("OK ", ngx.var.scheme)
}
}
--- user_files eval
">>> rootca.pem
$::RootCACert
>>> example.com.key
$::ExampleKey
>>> example.com.crt
$::ExampleCert"
--- request
GET /upstream_prx
--- error_code: 525
--- error_log
ssl handshake failed
--- response_body:
=== TEST 3: SSL verification can be disabled
--- http_config eval: $::HttpConfig
--- config
listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl;
location /upstream_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
upstream_ssl_server_name = "foobar",
upstream_ssl_verify = false
}):run()
}
}
location /upstream {
content_by_lua_block {
ngx.say("OK ", ngx.var.scheme)
}
}
--- user_files eval
">>> rootca.pem
$::RootCACert
>>> example.com.key
$::ExampleKey
>>> example.com.crt
$::ExampleCert"
--- request
GET /upstream_prx
--- error_code: 200
--- no_error_log
[error]
--- response_body
OK https
=== TEST 4: Empty SSL name treated as nil
--- http_config eval: $::HttpConfig
--- config
listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl;
location /upstream_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
upstream_ssl_server_name = "",
}):run()
}
}
location /upstream {
content_by_lua_block {
ngx.say("OK ", ngx.var.scheme)
}
}
--- user_files eval
">>> rootca.pem
$::RootCACert
>>> example.com.key
$::ExampleKey
>>> example.com.crt
$::ExampleCert"
--- request
GET /upstream_prx
--- error_code: 200
--- no_error_log
[error]
--- response_body
OK https
=== TEST 9a: Prime another key
--- http_config eval: $::HttpConfig
--- config
listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl;
location /purge_ssl_entry {
rewrite ^(.*)_entry$ $1_prx break;
content_by_lua_block {
local res, err = do_ssl(nil)
ngx.print(res:read_body())
}
}
location /purge_ssl_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
keep_cache_for = 3600,
}):run()
}
}
location /purge_ssl {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 9: ", ngx.req.get_headers()["Cookie"])
}
}
--- user_files eval
">>> rootca.pem
$::RootCACert
>>> example.com.key
$::ExampleKey
>>> example.com.crt
$::ExampleCert"
--- more_headers
Cookie: primed
--- request
GET /purge_ssl_entry
--- no_error_log
[error]
--- response_body
TEST 9: primed
=== TEST 9b: Purge with X-Purge: revalidate
--- http_config eval: $::HttpConfig
--- config
listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl;
location /purge_ssl_entry {
rewrite ^(.*)_entry$ $1_prx break;
content_by_lua_block {
local res, err = do_ssl(nil)
ngx.print(res:read_body())
}
}
location /purge_ssl_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /purge_ssl {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 9 Revalidated: ", ngx.req.get_headers()["Cookie"])
}
}
--- user_files eval
">>> rootca.pem
$::RootCACert
>>> example.com.key
$::ExampleKey
>>> example.com.crt
$::ExampleCert"
--- more_headers
X-Purge: revalidate
--- request
PURGE /purge_ssl_entry
--- wait: 2
--- no_error_log
[error]
--- response_body_like: "result":"purged"
--- error_code: 200
=== TEST 9c: Confirm cache was revalidated
--- http_config eval: $::HttpConfig
--- config
listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl;
location /purge_ssl_entry {
rewrite ^(.*)_entry$ $1_prx break;
content_by_lua_block {
local res, err = do_ssl(nil)
ngx.print(res:read_body())
}
}
location /purge_ssl_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- user_files eval
">>> rootca.pem
$::RootCACert
>>> example.com.key
$::ExampleKey
>>> example.com.crt
$::ExampleCert"
--- request
GET /purge_ssl_entry
--- no_error_log
[error]
--- response_body
TEST 9 Revalidated: primed
=== TEST 10: ESI include fragment
--- log_level: debug
--- http_config eval: $::HttpConfig
--- config
listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl;
location /esi_ssl_entry {
rewrite ^(.*)_entry$ $1_prx break;
content_by_lua_block {
local res, err = do_ssl(nil)
ngx.print(res:read_body())
}
}
location /esi_ssl_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
esi_enabled = true,
}):run()
}
}
location /fragment_1 {
content_by_lua_block {
ngx.say("FRAGMENT: ", ngx.req.get_uri_args()["a"] or "", "|", ngx.var.scheme)
}
}
location /esi_ssl {
default_type text/html;
content_by_lua_block {
ngx.header["Surrogate-Control"] = [[content="ESI/1.0"]]
ngx.say("1")
-- NOTE(review): the esi:include tags below were lost in extraction and are
-- reconstructed from the expected response body — verify against upstream.
ngx.print([[<esi:include src="/fragment_1" />]])
ngx.say("2")
ngx.print([[<esi:include src="/fragment_1?a=2" />]])
ngx.print("3")
ngx.print([[<esi:include src="http://127.0.0.1:$TEST_NGINX_PORT/fragment_1?a=3" />]])
}
}
--- user_files eval
">>> rootca.pem
$::RootCACert
>>> example.com.key
$::ExampleKey
>>> example.com.crt
$::ExampleCert"
--- request
GET /esi_ssl_entry
--- raw_response_headers_unlike: Surrogate-Control: content="ESI/1.0\"\r\n
--- response_body
1
FRAGMENT: |https
2
FRAGMENT: 2|https
3FRAGMENT: 3|http
--- no_error_log
[error]
================================================
FILE: t/02-integration/stale-if-error.t
================================================
# Integration tests for the stale-if-error Cache-Control extension (RFC 5861).
# Standard Test::Nginx preamble: make the shared LedgeEnv helper in t/ loadable.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Shared nginx http{} block, referenced by each test via "--- http_config eval".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Prime cache for subsequent tests
--- http_config eval: $::HttpConfig
--- config
location /stale_if_error_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_if_error_1 {
content_by_lua_block {
ngx.header["Cache-Control"] =
"max-age=3600, s-maxage=60, stale-if-error=60"
ngx.say("TEST 1")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /stale_if_error_1_prx
--- response_body
TEST 1
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 1b: Assert standard non-stale behaviours are unaffected.
--- http_config eval: $::HttpConfig
--- config
location /stale_if_error_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_if_error_1 {
return 500;
}
--- more_headers eval
[
"Cache-Control: no-cache",
"Cache-Control: no-store",
"Pragma: no-cache",
""
]
--- request eval
[
"GET /stale_if_error_1_prx",
"GET /stale_if_error_1_prx",
"GET /stale_if_error_1_prx",
"GET /stale_if_error_1_prx"
]
--- error_code eval
[
500,
500,
500,
200
]
--- raw_response_headers_unlike eval
[
"Warning: .*",
"Warning: .*",
"Warning: .*",
"Warning: .*"
]
--- no_error_log
[error]
=== TEST 2: Prime cache and expire it
--- http_config eval: $::HttpConfig
--- config
location /stale_if_error_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
res.header["Cache-Control"] =
"max-age=0, s-maxage=0, stale-if-error=60"
end)
handler:run()
}
}
location /stale_if_error_2 {
content_by_lua_block {
ngx.header["Cache-Control"] =
"max-age=3600, s-maxage=60, stale-if-error=60"
ngx.print("TEST 2")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /stale_if_error_2_prx
--- response_body: TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- wait: 2
--- no_error_log
[error]
=== TEST 2b: Request does not accept stale, for different reasons
--- http_config eval: $::HttpConfig
--- config
location /stale_if_error_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_if_error_2 {
return 500;
}
--- more_headers eval
[
"Cache-Control: min-fresh=5",
"Cache-Control: max-age=1",
"Cache-Control: max-stale=1"
]
--- request eval
[
"GET /stale_if_error_2_prx",
"GET /stale_if_error_2_prx",
"GET /stale_if_error_2_prx"
]
--- error_code eval
[
500,
500,
500
]
--- raw_response_headers_unlike eval
[
"Warning: .*",
"Warning: .*",
"Warning: .*"
]
--- response_body_unlike eval
[
"TEST 2",
"TEST 2",
"TEST 2",
]
--- no_error_log
[error]
=== TEST 2c: Request accepts stale
--- http_config eval: $::HttpConfig
--- config
location /stale_if_error_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_if_error_2 {
return 500;
}
--- more_headers eval
[
"Cache-Control: max-age=99999",
""
]
--- request eval
[
"GET /stale_if_error_2_prx",
"GET /stale_if_error_2_prx"
]
--- response_body eval
[
"TEST 2",
"TEST 2"
]
--- response_headers_like eval
[
"X-Cache: HIT from .*",
"X-Cache: HIT from .*"
]
--- raw_response_headers_like eval
[
"Warning: 112 .*",
"Warning: 112 .*"
]
--- no_error_log
[error]
=== TEST 4: Prime cache and expire it
--- http_config eval: $::HttpConfig
--- config
location /stale_if_error_4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
res.header["Cache-Control"] =
"max-age=0, s-maxage=0, stale-if-error=60, must-revalidate"
end)
handler:run()
}
}
location /stale_if_error_4 {
content_by_lua_block {
ngx.header["Cache-Control"] =
"max-age=3600, s-maxage=60, stale-if-error=60, must-revalidate"
ngx.say("TEST 2")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /stale_if_error_4_prx
--- response_body
TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 4b: Response cannot be served stale (must-revalidate)
--- http_config eval: $::HttpConfig
--- config
location /stale_if_error_4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_if_error_4 {
return 500;
}
--- request
GET /stale_if_error_4_prx
--- error_code: 500
--- raw_response_headers_unlike
Warning: .*
--- no_error_log
[error]
=== TEST 4c: Prime cache (with valid stale config + proxy-revalidate) and expire
--- http_config eval: $::HttpConfig
--- config
location /stale_if_error_4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
res.header["Cache-Control"] =
"max-age=0, s-maxage=0, stale-if-error=60, proxy-revalidate"
end)
handler:run()
}
}
location /stale_if_error_4 {
content_by_lua_block {
ngx.header["Cache-Control"] =
"max-age=3600, s-maxage=60, stale-if-error=60, proxy-revalidate"
ngx.say("TEST 2")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /stale_if_error_4_prx
--- response_body
TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 4d: Response cannot be served stale (proxy-revalidate)
--- http_config eval: $::HttpConfig
--- config
location /stale_if_error_4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_if_error_4 {
return 500;
}
--- request
GET /stale_if_error_4_prx
--- error_code: 500
--- raw_response_headers_unlike
Warning: .*
--- no_error_log
[error]
================================================
FILE: t/02-integration/stale-while-revalidate.t
================================================
# Integration tests for stale-while-revalidate (RFC 5861).
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Preload a shared Lua "state" module holding a request counter (state.req);
# origin locations below increment it so tests can assert how many times the
# origin was actually hit. run_worker starts the ledge background worker that
# services the revalidation jobs these tests depend on.
our $HttpConfig = LedgeEnv::http_config(extra_lua_config => qq{
package.loaded["state"] = {
req = 1,
}
}, run_worker => 1);
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Prime cache for subsequent tests
--- http_config eval: $::HttpConfig
--- config
location /stale_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_1 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, s-maxage=60, stale-while-revalidate=60"
ngx.say("TEST 1")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /stale_1_prx
--- response_body
TEST 1
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 1b: Assert standard non-stale behaviours are unaffected.
--- http_config eval: $::HttpConfig
--- config
location /stale_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_1 {
content_by_lua_block {
local state = require("state")
ngx.header["Cache-Control"] = "max-age=3600, s-maxage=60, stale-while-revalidate=60"
ngx.print("ORIGIN: ", state.req)
state.req = state.req + 1
}
}
--- more_headers eval
["Cache-Control: no-cache", "Cache-Control: no-store", "Pragma: no-cache", ""]
--- request eval
["GET /stale_1_prx", "GET /stale_1_prx", "GET /stale_1_prx", "GET /stale_1_prx"]
--- response_body eval
["ORIGIN: 1", "ORIGIN: 2", "ORIGIN: 3", "ORIGIN: 3"]
--- response_headers_like eval
["X-Cache: MISS from .*", "X-Cache: MISS from .*", "X-Cache: MISS from .*", "X-Cache: HIT from .*"]
--- raw_response_headers_unlike eval
["Warning: .*", "Warning: .*", "Warning: .*", "Warning: .*"]
--- no_error_log
[error]
=== TEST 2: Prime cache and expire it
--- http_config eval: $::HttpConfig
--- config
location /stale_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
res.header["Cache-Control"] = "max-age=0, s-maxage=0, stale-while-revalidate=60"
end)
handler:run()
}
}
location /stale_2 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, s-maxage=60, stale-while-revalidate=60"
ngx.say("TEST 2")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /stale_2_prx
--- response_body
TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- wait: 1
--- no_error_log
[error]
=== TEST 2b: Request does not accept stale, for different reasons
--- http_config eval: $::HttpConfig
--- config
location /stale_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
res.header["Cache-Control"] = "max-age=0, s-maxage=0, stale-while-revalidate=60"
end)
handler:run()
}
}
location /stale_2 {
content_by_lua_block {
local state = require("state")
ngx.header["Cache-Control"] = "max-age=3600, s-maxage=60, stale-while-revalidate=60"
ngx.print("ORIGIN: ", state.req)
state.req = state.req + 1
}
}
--- more_headers eval
["Cache-Control: min-fresh=5", "Cache-Control: max-age=1", "Cache-Control: max-stale=1"]
--- request eval
["GET /stale_2_prx", "GET /stale_2_prx", "GET /stale_2_prx"]
--- response_body eval
["ORIGIN: 1", "ORIGIN: 2", "ORIGIN: 3"]
--- response_headers_like eval
["X-Cache: MISS from .*", "X-Cache: MISS from .*", "X-Cache: MISS from .*"]
--- raw_response_headers_unlike eval
["Warning: .*", "Warning: .*", "Warning: .*"]
--- no_error_log
[error]
--- wait: 2
=== TEST 3: Prime cache and expire it
--- http_config eval: $::HttpConfig
--- config
location /stale_3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
res.header["Cache-Control"] = "max-age=0, s-maxage=0, stale-while-revalidate=60"
end)
handler:run()
}
}
location /stale_3 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, s-maxage=60, stale-while-revalidate=60"
ngx.print("TEST 3")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /stale_3_prx
--- response_body: TEST 3
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 3b: Request accepts stale
--- http_config eval: $::HttpConfig
--- config
location /stale_3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_3 {
content_by_lua_block {
ngx.print("ORIGIN")
}
}
--- more_headers eval
["Cache-Control: max-age=99999", "Cache-Control: max-stale=99999", ""]
--- request eval
["GET /stale_3_prx", "GET /stale_3_prx", "GET /stale_3_prx"]
--- response_body eval
["TEST 3", "TEST 3", "TEST 3"]
--- response_headers_like eval
["X-Cache: HIT from .*", "X-Cache: HIT from .*", "X-Cache: HIT from .*"]
--- raw_response_headers_like eval
["Warning: 110 .*", "Warning: 110 .*", "Warning: 110 .*"]
--- no_error_log
[error]
=== TEST 3c: Let revalidations finish to prevent errors
--- http_config eval: $::HttpConfig
--- config
location /stale_3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_3 {
content_by_lua_block {
ngx.print("ORIGIN")
}
}
--- request
GET /stale_3_prx
--- response_body: TEST 3
--- wait: 1
--- no_error_log
[error]
=== TEST 4: Prime cache and expire it
--- http_config eval: $::HttpConfig
--- config
location /stale_4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
res.header["Cache-Control"] = "max-age=0, s-maxage=0, stale-while-revalidate=60, must-revalidate"
end)
handler:run()
}
}
location /stale_4 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, s-maxage=60, stale-while-revalidate=60, must-revalidate"
ngx.say("TEST 2")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /stale_4_prx
--- response_body
TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- wait: 1
--- no_error_log
[error]
=== TEST 4b: Response cannot be served stale (must-revalidate)
--- http_config eval: $::HttpConfig
--- config
location /stale_4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({ visible_hostname = "ledge.example.com" }):run()
}
}
location /stale_4 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, s-maxage=60, stale-while-revalidate=60"
ngx.say("ORIGIN")
}
}
--- request
GET /stale_4_prx
--- response_body
ORIGIN
--- response_headers_like
X-Cache: MISS from .*
--- raw_response_headers_unlike
Warning: ledge\.example\.com .*
--- no_error_log
[error]
=== TEST 4c: Prime cache (with valid stale config + proxy-revalidate) and expire it
--- http_config eval: $::HttpConfig
--- config
location /stale_4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
res.header["Cache-Control"] = "max-age=0, s-maxage=0, stale-while-revalidate=60, proxy-revalidate"
end)
handler:run()
}
}
location /stale_4 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, s-maxage=60, stale-while-revalidate=60, proxy-revalidate"
ngx.say("TEST 2")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /stale_4_prx
--- response_body
TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- wait: 1
--- no_error_log
[error]
=== TEST 4d: Response cannot be served stale (proxy-revalidate)
--- http_config eval: $::HttpConfig
--- config
location /stale_4_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_4 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, s-maxage=60, stale-while-revalidate=60"
ngx.say("ORIGIN")
}
}
--- request
GET /stale_4_prx
--- response_body
ORIGIN
--- response_headers_like
X-Cache: MISS from .*
--- raw_response_headers_unlike
Warning: .*
--- no_error_log
[error]
=== TEST 5a: Prime cache for subsequent tests
--- http_config eval: $::HttpConfig
--- config
location /stale_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
-- immediately expire cache entries
res.header["Cache-Control"] = "max-age=0, s-maxage=0, stale-while-revalidate=60"
end)
handler:run()
}
}
location /stale_5 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, s-maxage=60, stale-while-revalidate=60"
ngx.say("TEST 5")
}
}
--- more_headers
Cache-Control: no-cache
--- request
GET /stale_5_prx
--- response_body
TEST 5
--- no_error_log
[error]
=== TEST 5b: Return stale
--- http_config eval: $::HttpConfig
--- config
location /stale_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save_revalidation_data", function(reval_params, reval_headers)
reval_headers["X-Test"] = ngx.req.get_headers()["X-Test"]
reval_headers["Cookie"] = ngx.req.get_headers()["Cookie"]
end)
handler:run()
}
}
location /stale_5 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 5b")
local hdr = ngx.req.get_headers()
ngx.say("X-Test: ",hdr["X-Test"])
ngx.say("Cookie: ",hdr["Cookie"])
ngx.say("Authorization: ",hdr["Authorization"])
}
}
--- request
GET /stale_5_prx
--- more_headers
X-Test: foobar
Cookie: baz=qux
Authorization: test
--- response_body
TEST 5
--- wait: 1
--- no_error_log
[error]
=== TEST 5c: Cache has been revalidated, custom headers
--- http_config eval: $::HttpConfig
--- config
location /stale_5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /stale_5_prx
--- response_body
TEST 5b
X-Test: foobar
Cookie: baz=qux
Authorization: test
--- no_error_log
[error]
=== TEST 6: Reset cache, manually remove revalidation data
--- http_config eval: $::HttpConfig
--- config
location /stale_reval_params_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
-- immediately expire cache entries
res.header["Cache-Control"] = "max-age=0, stale-while-revalidate=60"
end)
handler:run()
}
}
location /stale_reval_params {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, stale-while-revalidate=60"
ngx.print("TEST 6")
}
}
location /stale_reval_params_remove {
rewrite ^(.*)_remove$ $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local key_chain = handler:cache_key_chain()
redis:del(key_chain.reval_req_headers)
redis:del(key_chain.reval_params)
redis:set_keepalive()
ngx.print("REMOVED")
}
}
--- more_headers
Cache-Control: no-cache
--- request eval
["GET /stale_reval_params_prx", "GET /stale_reval_params_remove"]
--- response_body eval
["TEST 6", "REMOVED"]
--- no_error_log
[error]
=== TEST 6b: Stale revalidation does not choke on missing revalidation data.
--- http_config eval: $::HttpConfig
--- config
location /stale_reval_params_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_reval_params {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, stale-while-revalidate=60"
ngx.print("TEST 6: ", ngx.req.get_headers()["Cookie"])
}
}
--- more_headers
Cookie: mycookie
--- request
GET /stale_reval_params_prx
--- response_headers_like
Warning: 110 .*
--- error_log
Could not determine expiry for revalidation params. Will fallback to 3600 seconds.
--- response_body: TEST 6
--- wait: 1
--- error_code: 200
=== TEST 6c: Confirm revalidation
--- http_config eval: $::HttpConfig
--- config
location /stale_reval_params_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /stale_reval_params_prx
--- no_error_log
[error]
--- response_body: TEST 6: mycookie
--- error_code: 200
=== TEST 7: Prime and immediately expire two keys
--- http_config eval: $::HttpConfig
--- config
location /stale_reval_params_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local handler = require("ledge").create_handler()
handler:bind("before_save", function(res)
-- immediately expire cache entries
res.header["Cache-Control"] = "max-age=0, stale-while-revalidate=60"
end)
handler:run()
}
}
location /stale_reval_params {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3700, stale-while-revalidate=60"
ngx.print("TEST 7: ", ngx.req.get_uri_args()["a"])
}
}
--- more_headers
Cache-Control: no-cache
--- request eval
["GET /stale_reval_params_prx?a=1", "GET /stale_reval_params_prx?a=2"]
--- response_body eval
["TEST 7: 1", "TEST 7: 2"]
--- no_error_log
[error]
=== TEST 7b: Concurrent stale revalidation
--- http_config eval: $::HttpConfig
--- config
location /stale_reval_params_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /stale_reval_params {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600, stale-while-revalidate=60"
ngx.print("TEST 7 Revalidated: ", ngx.req.get_uri_args()["a"])
}
}
--- request eval
["GET /stale_reval_params_prx?a=1", "GET /stale_reval_params_prx?a=2"]
--- no_error_log
[error]
--- response_body eval
["TEST 7: 1", "TEST 7: 2"]
--- wait: 1
=== TEST 7c: Confirm revalidation
--- http_config eval: $::HttpConfig
--- config
location /stale_reval_params_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request eval
["GET /stale_reval_params_prx?a=1", "GET /stale_reval_params_prx?a=2"]
--- no_error_log
[error]
--- response_body eval
["TEST 7 Revalidated: 1", "TEST 7 Revalidated: 2"]
--- no_error_log
[error]
================================================
FILE: t/02-integration/upstream.t
================================================
# Integration tests for upstream connection handling: read timeouts (524),
# missing host/port (503), and unix-domain socket upstreams.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";
use LedgeEnv;
# Default the socket dir to the html dir so the unix-socket test (TEST 4)
# has a writable path for its listener.
$ENV{TEST_NGINX_HTML_DIR} ||= html_dir();
$ENV{TEST_NGINX_SOCKET_DIR} ||= $ENV{TEST_NGINX_HTML_DIR};
# Shared nginx http{} block, referenced by each test via "--- http_config eval".
our $HttpConfig = LedgeEnv::http_config();
no_long_string();
no_diff();
run_tests();
__DATA__
=== TEST 1: Short read timeout results in error 524.
--- http_config eval: $::HttpConfig
--- config
location /upstream_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
upstream_send_timeout = 5000,
upstream_connect_timeout = 5000,
upstream_read_timeout = 100,
}):run()
}
}
location /upstream {
content_by_lua_block {
ngx.sleep(1)
ngx.say("OK")
}
}
--- request
GET /upstream_prx
--- error_code: 524
--- response_body
--- error_log
timeout
=== TEST 2: No upstream results in a 503.
--- http_config eval: $::HttpConfig
--- config
location /upstream_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
upstream_host = "",
}):run()
}
}
--- request
GET /upstream_prx
--- error_code: 503
--- response_body
--- error_log
upstream connection failed:
=== TEST 3: No port results in 503
--- http_config eval: $::HttpConfig
--- config
location /upstream_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
upstream_host = "127.0.0.1",
upstream_port = "",
}):run()
}
}
--- request
GET /upstream_prx
--- error_code: 503
--- response_body
--- error_log
upstream connection failed:
=== TEST 4: No port with unix socket works
--- http_config eval: $::HttpConfig
--- config
listen unix:$TEST_NGINX_SOCKET_DIR/nginx.sock;
location /upstream_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
upstream_host = "unix:$TEST_NGINX_SOCKET_DIR/nginx.sock",
upstream_port = "",
}):run()
}
}
location /upstream {
echo "OK";
}
--- request
GET /upstream_prx
--- error_code: 200
--- response_body
OK
--- no_error_log
[error]
================================================
FILE: t/02-integration/upstream_client.t
================================================
# Tests for user-supplied upstream clients: a custom resty.http client is
# attached to the handler via the "before_upstream_connect" event instead of
# letting ledge open its own upstream connection (handler defaults below
# deliberately point at an unusable upstream: empty host, port 9999).
#
# NOTE: everything inside the qq{ } blocks below is nginx config / Lua source,
# interpolated with LedgeEnv values at Perl compile time — it is a string
# literal here, not Perl code.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";   # make the shared LedgeEnv test helper loadable
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
    lua_shared_dict test_upstream_dict 1m;
}, extra_lua_config => qq{
    function create_upstream_client(config)
        -- Defaults
        config = config or {}
        config["timeout"] = config["timeout"] or 100
        config["read_timeout"] = config["read_timeout"] or 500
        config["host"] = config["host"] or "$LedgeEnv::nginx_host"
        config["port"] = config["port"] or $LedgeEnv::nginx_port
        return function(handler)
            local httpc = require("resty.http").new()
            httpc:set_timeout(config.timeout)
            local ok, err = httpc:connect(
                config.host,
                config.port
            )
            if not ok then
                ngx.log(ngx.ERR, "upstream client connection failed: ", err)
                return nil
            end
            httpc:set_timeout(config.read_timeout)
            handler.upstream_client = httpc
        end
    end
    require("ledge").bind("before_upstream_connect", function(handler)
        if ngx.req.get_uri_args()["skip_init"] then
            -- do nothing
        else
            -- create handler and pass through res
            create_upstream_client()(handler)
        end
    end)
    require("ledge").set_handler_defaults({
        upstream_host = "",
        upstream_port = 9999,
    })
}, run_worker => 1);
no_long_string();   # keep failure diagnostics terse
no_diff();          # suppress diff output on body mismatch
run_tests();        # must come last: hands control to Test::Nginx
__DATA__
=== TEST 1: Sanity, response returned with upstream_client configured
--- http_config eval: $::HttpConfig
--- config
location /upstream_client_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /upstream_client {
content_by_lua_block {
ngx.say("OK")
}
}
--- request
GET /upstream_client_prx
--- no_error_log
[error]
--- error_code: 200
--- response_body
OK
=== TEST 1b: Sanity, response returned with upstream_client configured at runtime
--- http_config eval: $::HttpConfig
--- config
location /upstream_client_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local h = require("ledge").create_handler()
h:bind("before_upstream_connect", create_upstream_client() )
h:run()
}
}
location /upstream_client {
content_by_lua_block {
ngx.say("OK")
}
}
--- request
GET /upstream_client_prx?skip_init=true
--- no_error_log
[error]
--- error_code: 200
--- response_body
OK
=== TEST 2: Short read timeout results in error 524.
--- http_config eval: $::HttpConfig
--- config
location /upstream_client_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /upstream_client {
content_by_lua_block {
ngx.sleep(1)
ngx.say("OK")
}
}
--- request
GET /upstream_client_prx
--- error_code: 524
--- response_body
--- error_log
timeout
=== TEST 3: No upstream results in a 503.
--- http_config eval: $::HttpConfig
--- config
location /upstream_client_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local h = require("ledge").create_handler()
h:bind("before_upstream_connect", function(handler)
handler.upstream_client = {}
end)
h:run()
}
}
--- request
GET /upstream_client_prx
--- error_code: 503
--- response_body
--- error_log
upstream connection failed
================================================
FILE: t/02-integration/validation.t
================================================
# Integration tests for HTTP validation/revalidation semantics:
# If-Modified-Since / If-None-Match handling, end-to-end revalidation with
# Cache-Control: max-age=0, and 304 responses both to and from the upstream.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";   # make the shared LedgeEnv test helper loadable
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config();
no_long_string();   # keep failure diagnostics terse
no_diff();          # suppress diff output on body mismatch
run_tests();        # must come last: hands control to Test::Nginx
__DATA__
=== TEST 1: Prime cache for subsequent tests
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "test1"
ngx.header["Last-Modified"] = ngx.http_time(ngx.time() - 100)
ngx.say("TEST 1")
}
}
--- request
GET /validation_prx
--- response_body
TEST 1
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 2: Unspecified end-to-end revalidation
max-age=0 + no validator, upstream 200
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "test2"
ngx.header["Last-Modified"] = ngx.http_time(ngx.time() - 90)
ngx.say("TEST 2")
}
}
--- more_headers
Cache-Control: max-age=0
--- request
GET /validation_prx
--- error_code: 200
--- response_body
TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 2b: Unspecified end-to-end revalidation
max-age=0 + no validator, upstream 304
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation {
content_by_lua_block {
ngx.exit(ngx.HTTP_NOT_MODIFIED)
}
}
--- more_headers
Cache-Control: max-age=0
--- request
GET /validation_prx
--- response_body
TEST 2
--- error_code: 200
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 3: Revalidate against cache using IMS in the future.
Check we still have headers with our 304, and no body.
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
ngx.req.set_header(
"If-Modified-Since",
ngx.http_time(ngx.time() + 100)
)
require("ledge").create_handler():run()
}
}
--- request
GET /validation_prx
--- error_code: 304
--- response_headers
Cache-Control: max-age=3600
Etag: test2
--- response_body
--- no_error_log
[error]
=== TEST 3b: Revalidate against cache using IMS in the past.
Return 200 fresh cache.
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
ngx.req.set_header(
"If-Modified-Since",
ngx.http_time(ngx.time() - 100)
)
require("ledge").create_handler():run()
}
}
--- request
GET /validation_prx
--- error_code: 200
--- response_body
TEST 2
--- response_headers_like
X-Cache: HIT from .*
--- no_error_log
[error]
=== TEST 4: Revalidate against cache using Etag.
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
If-None-Match: test2
--- request
GET /validation_prx
--- error_code: 304
--- response_body
--- no_error_log
[error]
=== TEST 4b: Revalidate against cache using LM and Etag.
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
ngx.req.set_header(
"If-Modified-Since",
ngx.http_time(ngx.time() + 100)
)
require("ledge").create_handler():run()
}
}
--- more_headers
If-None-Match: test2
--- request
GET /validation_prx
--- error_code: 304
--- response_body
--- no_error_log
[error]
=== TEST 5: Specific end-to-end revalidation using IMS, upstream 304.
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
ngx.req.set_header(
"If-Modified-Since",
ngx.http_time(ngx.time() - 150)
)
require("ledge").create_handler():run()
}
}
location /validation {
content_by_lua_block {
ngx.exit(ngx.HTTP_NOT_MODIFIED)
}
}
--- more_headers
Cache-Control: max-age=0
--- request
GET /validation_prx
--- error_code: 200
--- response_body
TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 6: Specific end-to-end revalidation
Using INM (matching), upstream 304.
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation {
content_by_lua_block {
ngx.exit(ngx.HTTP_NOT_MODIFIED)
}
}
--- more_headers
Cache-Control: max-age=0
If-None-Match: test2
--- request
GET /validation_prx
--- error_code: 304
--- no_error_log
[error]
=== TEST 6b: Specific end-to-end revalidation
Using INM (not matching), upstream 304.
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation {
content_by_lua_block {
ngx.exit(ngx.HTTP_NOT_MODIFIED)
}
}
--- more_headers
Cache-Control: max-age=0
If-None-Match: test6b
--- request
GET /validation_prx
--- error_code: 200
--- response_body
TEST 2
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 7: Specific end-to-end revalidation using IMS, upstream 200.
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
ngx.req.set_header(
"If-Modified-Since",
ngx.http_time(ngx.time() - 150)
)
require("ledge").create_handler():run()
}
}
location /validation {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "test7"
ngx.header["Last-Modified"] = ngx.http_time(ngx.time() - 70)
ngx.say("TEST 7")
}
}
--- more_headers
Cache-Control: max-age=0
--- request
GET /validation_prx
--- error_code: 200
--- response_body
TEST 7
--- no_error_log
[error]
=== TEST 8: Specific end-to-end revalidation using INM, upstream 200.
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "test8"
ngx.say("TEST 8")
}
}
--- more_headers
Cache-Control: max-age=0
If-None-Match: test2
--- request
GET /validation_prx
--- error_code: 200
--- response_body
TEST 8
--- response_headers_like
X-Cache: MISS from .*
--- no_error_log
[error]
=== TEST 8b: Unspecified end-to-end revalidation
Using INM, upstream 200, validators now match (so 304 to client).
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "test8b"
ngx.say("TEST 8b")
}
}
--- more_headers
Cache-Control: max-age=0
If-None-Match: test8b
--- request
GET /validation_prx
--- error_code: 304
--- response_body
--- no_error_log
[error]
=== TEST 8c: Check revalidation re-saved.
--- http_config eval: $::HttpConfig
--- config
location /validation_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /validation_prx
--- error_code: 200
--- response_body
TEST 8b
--- response_headers_like
X-Cache: HIT from .*
--- no_error_log
[error]
=== TEST 9: Validators on a cache miss (should never 304).
--- http_config eval: $::HttpConfig
--- config
location /validation_9_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation_9 {
content_by_lua_block {
if ngx.req.get_headers()["Cache-Control"] == "max-age=0" and
ngx.req.get_headers()["If-None-Match"] == "test9" then
ngx.exit(ngx.HTTP_NOT_MODIFIED)
else
ngx.say("TEST 9")
end
}
}
--- more_headers
If-None-Match: test9
--- request
GET /validation_9_prx
--- error_code: 200
--- response_body
TEST 9
--- no_error_log
[error]
=== TEST 10: Re-Validation on an a cache miss using INM. Upstream 200, but valid once cached (so 304 to client).
--- http_config eval: $::HttpConfig
--- config
location /validation10_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation10 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Etag"] = "test10"
ngx.header["Last-Modified"] = ngx.http_time(ngx.time() - 60)
ngx.say("TEST 10")
}
}
--- more_headers
If-None-Match: test10
--- request
GET /validation10_prx
--- error_code: 304
--- response_body
--- no_error_log
[error]
=== TEST 11: Test badly formatted IMS is ignored.
--- http_config eval: $::HttpConfig
--- config
location /validation10_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- more_headers
If-Modified-Since: 234qr12411224
--- request
GET /validation10_prx
--- error_code: 200
--- response_body
TEST 10
--- response_headers_like
X-Cache: HIT from .*
--- no_error_log
[error]
=== TEST 12: Prime cache
--- http_config eval: $::HttpConfig
--- config
location /validation_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation_12 {
content_by_lua_block {
ngx.header["Cache-Control"] = "public, max-age=600"
ngx.say("Test 12")
}
}
--- request
GET /validation_12_prx
--- error_code: 200
--- response_body
Test 12
--- no_error_log
[error]
=== TEST 12a: IMS in req and missing LM does not 304
--- http_config eval: $::HttpConfig
--- config
location /validation_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation_12 {
content_by_lua_block {
ngx.say("Test 12")
}
}
--- more_headers
If-Modified-Since: Tue, 29 Nov 2016 23:16:59 GMT
--- request
GET /validation_12_prx
--- error_code: 200
--- response_body
Test 12
--- no_error_log
[error]
=== TEST 12b: INM in req and missing etag does not 304
--- http_config eval: $::HttpConfig
--- config
location /validation_12_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /validation_12 {
content_by_lua_block {
ngx.say("Test 12")
}
}
--- more_headers
If-None-Match: 1234
--- request
GET /validation_12_prx
--- error_code: 200
--- response_body
Test 12
--- no_error_log
[error]
================================================
FILE: t/02-integration/vary.t
================================================
# Integration tests for Vary header support: per-representation cache keys,
# vary-spec changes, case-insensitivity, sort order, the
# "before_vary_selection" hook, and repset/vary key lifecycle in Redis.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";   # make the shared LedgeEnv test helper loadable
use LedgeEnv;
# Tests share request counters via the ledge_test dict (see qq{} string below).
our $HttpConfig = LedgeEnv::http_config(extra_nginx_config => qq{
    lua_shared_dict ledge_test 1m;
    lua_check_client_abort on;
});
no_long_string();   # keep failure diagnostics terse
no_diff();          # suppress diff output on body mismatch
run_tests();        # must come last: hands control to Test::Nginx
__DATA__
=== TEST 1: Vary
--- http_config eval: $::HttpConfig
--- config
location /vary_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
}
location /vary {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Vary"] = "X-Test"
ngx.print("TEST 1: ", ngx.req.get_headers()["X-Test"])
}
}
--- request eval
["GET /vary_prx", "GET /vary_prx", "GET /vary_prx", "GET /vary_prx"]
--- more_headers eval
[
"X-Test: testval",
"X-Test: anotherval",
"",
"X-Test: testval",
]
--- response_headers_like eval
[
"X-Cache: MISS from .*",
"X-Cache: MISS from .*",
"X-Cache: MISS from .*",
"X-Cache: HIT from .*",
]
--- response_body eval
[
"TEST 1: testval",
"TEST 1: anotherval",
"TEST 1: nil",
"TEST 1: testval",
]
--- no_error_log
[error]
=== TEST 2: Vary change
--- http_config eval: $::HttpConfig
--- config
location /vary_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
}
location /vary {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Vary"] = "X-Test2"
ngx.print("TEST 2: ", ngx.req.get_headers()["X-Test2"], " ", ngx.req.get_headers()["X-Test"])
}
}
--- request eval
["GET /vary_prx", "GET /vary_prx", "GET /vary_prx", "GET /vary_prx"]
--- more_headers eval
[
"X-Test: testval
Cache-Control: no-cache",
"X-Test2: newval",
"",
"X-Test: testval
X-Test2: newval",
]
--- response_headers_like eval
[
"X-Cache: MISS from .*",
"X-Cache: MISS from .*",
"X-Cache: HIT from .*",
"X-Cache: HIT from .*",
]
--- response_body eval
[
"TEST 2: nil testval",
"TEST 2: newval nil",
"TEST 2: nil testval",
"TEST 2: newval nil",
]
--- no_error_log
[error]
=== TEST 3: Cache update changes 1 representation
--- http_config eval: $::HttpConfig
--- config
location /vary3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
}
location /vary {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Vary"] = "X-Test"
ngx.print("TEST 3: ", ngx.req.get_headers()["X-Test"])
}
}
--- request eval
["GET /vary3_prx", "GET /vary3_prx", "GET /vary3_prx", "GET /vary3_prx"]
--- more_headers eval
[
"X-Test: testval",
"X-Test: value2",
"X-Test: testval
Cache-Control: no-cache",
"X-Test: value2",
]
--- response_headers_like eval
[
"X-Cache: MISS from .*",
"X-Cache: MISS from .*",
"X-Cache: MISS from .*",
"X-Cache: HIT from .*",
]
--- response_body eval
[
"TEST 3: testval",
"TEST 3: value2",
"TEST 3: testval",
"TEST 3: value2",
]
--- no_error_log
[error]
=== TEST 4: Missing keys are cleaned from repset
--- http_config eval: $::HttpConfig
--- config
location /check {
rewrite ^ /vary break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local res, err = redis:smembers(handler:cache_key_chain().repset)
for _, v in ipairs(res) do
assert(v ~= "foobar", "Key should have been cleaned")
end
ngx.print("OK")
}
}
location /vary_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local ok, err = redis:sadd(handler:cache_key_chain().repset, "foobar")
handler:run()
}
}
location /vary {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.header["Vary"] = "X-Test"
ngx.print("TEST 4")
}
}
--- request eval
["GET /vary_prx", "GET /check"]
--- more_headers eval
["Cache-Control: no-cache",""]
--- response_body eval
[
"TEST 4",
"OK"
]
--- no_error_log
[error]
=== TEST 5: Repset TTL maintained
--- http_config eval: $::HttpConfig
--- config
location = /check {
rewrite ^ /vary5 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local repset_ttl, err = redis:ttl(handler:cache_key_chain().repset)
if err then ngx.log(ngx.ERR, err) end
local vary_ttl, err = redis:ttl(handler:cache_key_chain().vary)
if err then ngx.log(ngx.ERR, err) end
local count = ngx.shared.ledge_test:get("test5")
if count < 3 then
if (repset_ttl - handler.config.keep_cache_for) <= 300
or (vary_ttl - handler.config.keep_cache_for) <= 300 then
ngx.print("FAIL")
ngx.log(ngx.ERR,
(repset_ttl - handler.config.keep_cache_for),
" ",
(vary_ttl - handler.config.keep_cache_for)
)
else
ngx.print("OK")
end
else
if (repset_ttl - handler.config.keep_cache_for) < 7200
or (vary_ttl - handler.config.keep_cache_for) < 7200 then
ngx.print("FAIL 2")
ngx.log(ngx.ERR,
(repset_ttl - handler.config.keep_cache_for),
" ",
(vary_ttl - handler.config.keep_cache_for)
)
else
ngx.print("OK")
end
end
}
}
location /vary5_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(false)
require("ledge").create_handler():run()
}
}
location /vary {
content_by_lua_block {
local incr = ngx.shared.ledge_test:incr("test5", 1, 0)
if incr == 1 then
ngx.header["Cache-Control"] = "max-age=3600"
elseif incr == 3 then
ngx.header["Cache-Control"] = "max-age=7200"
else
ngx.header["Cache-Control"] = "max-age=300"
end
ngx.header["Vary"] = "X-Test"
ngx.print("TEST 5")
}
}
--- request eval
["GET /vary5_prx", "GET /vary5_prx", "GET /check", "GET /vary5_prx", "GET /check"]
--- more_headers eval
[
"Cache-Control: no-cache",
"Cache-Control: no-cache",
"",
"Cache-Control: no-cache",
"",
]
--- response_headers_like eval
[
"X-Cache: MISS from .*",
"X-Cache: MISS from .*",
"",
"X-Cache: MISS from .*",
"",
]
--- response_body eval
[
"TEST 5",
"TEST 5",
"OK",
"TEST 5",
"OK",
]
--- no_error_log
[error]
=== TEST 6: Vary - case insensitive
--- http_config eval: $::HttpConfig
--- config
location /vary6_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
}
location /vary {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
local incr = ngx.shared.ledge_test:incr("test6", 1, 0)
if incr == 1 then
ngx.header["Vary"] = "X-Test"
elseif incr == 2 then
ngx.header["Vary"] = "X-test"
else
ngx.header["Vary"] = "x-Test"
end
ngx.print("TEST 6: ", ngx.req.get_headers()["X-Test"])
}
}
--- request eval
["GET /vary6_prx", "GET /vary6_prx", "GET /vary6_prx"]
--- more_headers eval
[
"X-Test: testval",
"X-test: TestVAL",
"X-teSt: foobar",
]
--- response_headers_like eval
[
"X-Cache: MISS from .*",
"X-Cache: HIT from .*",
"X-Cache: MISS from .*",
]
--- response_body eval
[
"TEST 6: testval",
"TEST 6: testval",
"TEST 6: foobar",
]
--- no_error_log
[error]
=== TEST 7: Vary - sort order
--- http_config eval: $::HttpConfig
--- config
location /vary7_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
}
location /vary {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3700"
local incr = ngx.shared.ledge_test:incr("test7", 1, 0)
if incr == 1 then
-- Prime with 1 order
ngx.header["Vary"] = "X-Test, X-Test2, X-Test3"
elseif incr == 2 then
-- Second request, different order, different values in request
ngx.header["Vary"] = "X-Test3, X-test, X-test2"
end
assert (incr < 3, "Third request should be a cache hit")
ngx.print("TEST 7: ", incr)
}
}
--- request eval
["GET /vary7_prx", "GET /vary7_prx", "GET /vary7_prx"]
--- more_headers eval
[
"X-Test: abc
X-Test2: 123
X-Test3: xyz
",
"X-Test: abc2
X-Test2: 123b
X-Test3: xyz2
",
"X-Test: abc
X-Test2: 123
X-Test3: xyz
",
]
--- response_headers_like eval
[
"X-Cache: MISS from .*
Vary: X-Test, X-Test2, X-Test3",
"X-Cache: MISS from .*
Vary: X-Test3, X-test, X-test2",
"X-Cache: HIT from .*
Vary: X-Test, X-Test2, X-Test3",
]
--- response_body eval
[
"TEST 7: 1",
"TEST 7: 2",
"TEST 7: 1",
]
--- no_error_log
[error]
=== TEST 8: Vary event hook
--- http_config eval: $::HttpConfig
--- config
location /vary8_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
local handler = require("ledge").create_handler()
handler:bind("before_vary_selection", function(vary_key)
local x_vary = ngx.req.get_headers()["X-Vary"]
-- Do nothing if noop set
if x_vary ~= "noop" then
vary_key["x-test"] = nil
vary_key["X-Test2"] = x_vary
end
ngx.log(ngx.DEBUG, "Vary Key: ", require("cjson").encode(vary_key))
end)
handler:run()
}
}
location /vary {
content_by_lua_block {
local incr = ngx.shared.ledge_test:incr("test8", 1, 0)
ngx.header["Cache-Control"] = "max-age=3600"
if ngx.req.get_headers()["X-Vary"] == "noop" then
ngx.header["Vary"] = "X-Test2"
else
ngx.header["Vary"] = "X-Test"
end
ngx.print("TEST 8: ", incr)
}
}
--- request eval
["GET /vary8_prx", "GET /vary8_prx", "GET /vary8_prx", "GET /vary8_prx"]
--- more_headers eval
[
"X-Test: testval
X-Vary: foo",
"X-Test: anotherval
X-Vary: foo",
"X-Test2: bar
X-Vary: noop",
"X-Vary: bar",
]
--- response_headers_like eval
[
"X-Cache: MISS from .*",
"X-Cache: HIT from .*",
"X-Cache: MISS from .*",
"X-Cache: HIT from .*",
]
--- response_body eval
[
"TEST 8: 1",
"TEST 8: 1",
"TEST 8: 2",
"TEST 8: 2",
]
--- no_error_log
[error]
=== TEST 9: Other representations are preserved with a no-cache-response
--- http_config eval: $::HttpConfig
--- config
location /vary_9_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge.state_machine").set_debug(true)
require("ledge").create_handler():run()
}
}
location /vary_9 {
content_by_lua_block {
local incr = ngx.shared.ledge_test:incr("test9", 1, 0)
if incr == 3 then
ngx.header["Cache-Control"] = "no-cache"
else
ngx.header["Cache-Control"] = "max-age=60"
end
ngx.header["Vary"] = "X-Test"
ngx.print("TEST 9: ", incr)
}
}
--- request eval
[
"GET /vary_9_prx",
"GET /vary_9_prx",
"GET /vary_9_prx",
"GET /vary_9_prx",
]
--- more_headers eval
[
"X-Test: Foo",
"X-Test: Bar",
"X-Test: Foo
Cache-Control: no-cache",
"X-Test: Bar",
]
--- response_body eval
[
"TEST 9: 1",
"TEST 9: 2",
"TEST 9: 3",
"TEST 9: 2",
]
--- no_error_log
[error]
=== TEST 10: Vary key cleaned up
--- http_config eval: $::HttpConfig
--- config
location /vary_10_check {
rewrite ^(.*)_check$ $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local chain = handler:cache_key_chain()
local res, err = redis:smembers(chain.repset)
local exists, err = redis:exists(chain.vary)
ngx.print(#res, " ", exists)
}
}
location /vary_10_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /vary_10 {
content_by_lua_block {
local incr = ngx.shared.ledge_test:incr("test10", 1, 0)
if incr < 3 then
ngx.header["Cache-Control"] = "max-age=60"
else
ngx.header["Cache-Control"] = "no-cache"
end
ngx.header["Vary"] = "X-Test"
ngx.print("TEST 10: ", incr)
}
}
--- request eval
[
"GET /vary_10_prx",
"GET /vary_10_prx",
"GET /vary_10_check",
"GET /vary_10_prx",
"GET /vary_10_check",
"GET /vary_10_prx",
"GET /vary_10_check",
]
--- more_headers eval
[
"X-Test: Foo",
"X-Test: Bar",
"",
"X-Test: Foo
Cache-Control: no-cache",
"",
"X-Test: Bar
Cache-Control: no-cache",
"",
]
--- response_body eval
[
"TEST 10: 1",
"TEST 10: 2",
"2 1",
"TEST 10: 3",
"1 1",
"TEST 10: 4",
"0 0",
]
--- no_error_log
[error]
=== TEST 11: Missing repset re-created on read
--- http_config eval: $::HttpConfig
--- config
location /vary_11_break {
rewrite ^(.*)_break $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local chain = handler:cache_key_chain()
local res, err = redis:del(chain.repset)
local exists, err = redis:exists(chain.repset)
ngx.print(exists)
}
}
location /vary_11_check {
rewrite ^(.*)_check$ $1 break;
content_by_lua_block {
local redis = require("ledge").create_redis_connection()
local handler = require("ledge").create_handler()
handler.redis = redis
local chain = handler:cache_key_chain()
local res, err = redis:smembers(chain.repset)
ngx.print(#res)
}
}
location /vary_11_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /vary_11 {
content_by_lua_block {
local incr = ngx.shared.ledge_test:incr("test11", 1, 0)
if incr < 3 then
ngx.header["Cache-Control"] = "max-age=60"
else
ngx.header["Cache-Control"] = "no-cache"
end
ngx.header["Vary"] = "X-Test"
ngx.print("TEST 11: ", incr)
}
}
--- request eval
[
"GET /vary_11_prx",
"GET /vary_11_prx",
"GET /vary_11_break",
"GET /vary_11_prx",
"GET /vary_11_check",
"GET /vary_11_prx",
"GET /vary_11_check",
]
--- more_headers eval
[
"X-Test: Foo",
"X-Test: Bar",
"",
"X-Test: Foo",
"",
"X-Test: Bar",
"",
]
--- response_body eval
[
"TEST 11: 1",
"TEST 11: 2",
"0",
"TEST 11: 1",
"1",
"TEST 11: 2",
"2",
]
--- response_headers_like eval
[
"X-Cache: MISS from .*",
"X-Cache: MISS from .*",
"",
"X-Cache: HIT from .*",
"",
"X-Cache: HIT from .*",
"",
]
--- no_error_log
[error]
================================================
FILE: t/02-integration/via_header.t
================================================
# Integration tests for the Via response header: version advertisement,
# the advertise_ledge and visible_hostname config options, and merging of
# (possibly multiple) upstream Via headers.
use Test::Nginx::Socket 'no_plan';
use FindBin;
use lib "$FindBin::Bin/..";   # make the shared LedgeEnv test helper loadable
use LedgeEnv;
our $HttpConfig = LedgeEnv::http_config();
no_long_string();   # keep failure diagnostics terse
no_diff();          # suppress diff output on body mismatch
run_tests();        # must come last: hands control to Test::Nginx
__DATA__
=== TEST 1: Ledge version advertised by default
--- http_config eval: $::HttpConfig
--- config
location /t_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /t {
echo "ORIGIN";
}
--- request
GET /t_prx
--- response_headers_like
Via: \d+\.\d+ .+ \(ledge/\d+\.\d+[\.\d]*\)
--- no_error_log
[error]
=== TEST 2: Ledge version not advertised
--- http_config eval: $::HttpConfig
--- config
location /t_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
advertise_ledge = false,
}):run()
}
}
location /t {
echo "ORIGIN";
}
--- request
GET /t_prx
--- raw_response_headers_unlike: Via: \d+\.\d+ .+ \(ledge/\d+\.\d+[\.\d]*\)
--- no_error_log
[error]
=== TEST 3: Via header uses visible_hostname config
--- http_config eval: $::HttpConfig
--- config
location /t_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
visible_hostname = "ledge.example.com"
}):run()
}
}
location /t {
echo "ORIGIN";
}
--- request
GET /t_prx
--- response_headers_like
Via: \d+\.\d+ ledge.example.com:\d+ \(ledge/\d+\.\d+[\.\d]*\)
--- no_error_log
[error]
=== TEST 4: Via header from upstream
--- http_config eval: $::HttpConfig
--- config
location /t_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /t {
content_by_lua_block {
ngx.header["Via"] = "1.1 foo"
}
}
--- request
GET /t_prx
--- more_headers
Cache-Control: no-cache
--- response_headers_like
Via: \d+\.\d+ .+ \(ledge/\d+\.\d+[\.\d]*\), \d+\.\d+ foo
--- no_error_log
[error]
=== TEST 5: Erroneous multiple Via headers from upstream
--- http_config eval: $::HttpConfig
--- config
location /t_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler({
upstream_port = 1985,
}):run()
}
}
--- tcp_listen: 1985
--- tcp_reply
HTTP/1.1 200 OK
Content-Length: 2
Content-Type: text/plain
Via: 1.1 foo
Via: 1.1 foo.bar

OK
--- request
GET /t_prx
--- more_headers
Cache-Control: no-cache
--- response_body: OK
--- response_headers_like
Via: 1.1 .+ \(ledge/\d+\.\d+[\.\d]*\), 1.1 foo, 1.1 foo.bar
--- no_error_log
[error]
================================================
FILE: t/03-sentinel/01-master_up.t
================================================
# Sentinel integration test (master up): configures ledge to discover its
# Redis master through a Sentinel ("sentinel://<master>:m/<db>" URL), for
# both the main connection and the storage driver, then primes the cache.
# This file builds $HttpConfig by hand (rather than via LedgeEnv) because it
# needs Sentinel-specific redis_connector_params.
use Test::Nginx::Socket 'no_plan';
use Cwd qw(cwd);
my $pwd = cwd();
# Environment defaults — overridable by the CI/test runner.
$ENV{TEST_NGINX_PORT} ||= 1984;
$ENV{TEST_LEDGE_REDIS_DATABASE} ||= 2;
$ENV{TEST_LEDGE_REDIS_QLESS_DATABASE} ||= 3;
$ENV{TEST_LEDGE_SENTINEL_MASTER_NAME} ||= 'mymaster';
$ENV{TEST_LEDGE_SENTINEL_PORT} ||= 6381;
$ENV{TEST_COVERAGE} ||= 0;
# NOTE: the qq{} below is nginx config / Lua source; $ENV{...} expressions are
# interpolated by Perl before nginx ever sees the string.
our $HttpConfig = qq{
lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;";
init_by_lua_block {
    if $ENV{TEST_COVERAGE} == 1 then
        require("luacov.runner").init()
    end
    local db = $ENV{TEST_LEDGE_REDIS_DATABASE}
    local qless_db = $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE}
    local master_name = '$ENV{TEST_LEDGE_SENTINEL_MASTER_NAME}'
    local sentinel_port = $ENV{TEST_LEDGE_SENTINEL_PORT}
    local redis_connector_params = {
        url = "sentinel://" .. master_name .. ":m/" .. tostring(db),
        sentinels = {
            { host = "127.0.0.1", port = sentinel_port },
        },
    }
    require("ledge").configure({
        redis_connector_params = redis_connector_params,
        qless_db = $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE},
    })
    require("ledge").set_handler_defaults({
        upstream_host = "127.0.0.1",
        upstream_port = $ENV{TEST_NGINX_PORT},
        storage_driver_config = {
            redis_connector_params = redis_connector_params,
        }
    })
}
init_worker_by_lua_block {
    require("ledge").create_worker():run()
}
};
no_long_string();   # keep failure diagnostics terse
run_tests();        # must come last: hands control to Test::Nginx
__DATA__
=== TEST 1: Prime cache
--- http_config eval: $::HttpConfig
--- config
location /sentinel_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /sentinel_1 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK")
}
}
--- request
GET /sentinel_1_prx
--- response_body
OK
--- no_error_log
[error]
=== TEST 2: create_redis_slave_connection
--- http_config eval: $::HttpConfig
--- config
location /sentinel_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
local slave, err = require("ledge").create_redis_slave_connection()
assert(slave and not err,
"create_redis_slave_connection should return positively")
assert(slave:role()[1] == "slave", "role should be slave")
}
}
--- request
GET /sentinel_1_prx
--- no_error_log
[error]
=== TEST 3a: Prime cache (primes /sentinel_3 for the wildcard purge below)
--- http_config eval: $::HttpConfig
--- config
location /sentinel_3_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /sentinel_3 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("OK")
}
}
--- request
GET /sentinel_3_prx
--- response_body
OK
--- no_error_log
[error]
=== TEST 3: Wildcard Purge (scan on slave)
--- http_config eval: $::HttpConfig
--- config
location /sentinel_3 {
content_by_lua_block {
require("ledge").create_handler({
keyspace_scan_count = 1,
}):run()
}
}
--- request
PURGE /sentinel_3*
--- wait: 1
--- no_error_log
[error]
--- error_code: 200
================================================
FILE: t/03-sentinel/02-master_down.t
================================================
use Test::Nginx::Socket 'no_plan';
use Cwd qw(cwd);

# Environment defaults; each can be overridden externally to point the
# suite at a different nginx / Redis / Sentinel setup.
$ENV{TEST_NGINX_PORT} ||= 1984;
$ENV{TEST_LEDGE_REDIS_DATABASE} ||= 2;
$ENV{TEST_LEDGE_REDIS_QLESS_DATABASE} ||= 3;
$ENV{TEST_LEDGE_SENTINEL_MASTER_NAME} ||= 'mymaster';
$ENV{TEST_LEDGE_SENTINEL_PORT} ||= 6381;
$ENV{TEST_COVERAGE} ||= 0;

# Shared nginx http{} config: connects to Redis via Sentinel using the ":s"
# (slave) role, so writes are expected to fail in these tests. Socket error
# logging is off because connection failures are part of the scenario.
# NOTE(review): the Lua local `qless_db` below is declared but unused (the
# configure() call reads the env var directly).
our $HttpConfig = qq{
lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;";
lua_socket_log_errors Off;
init_by_lua_block {
if $ENV{TEST_COVERAGE} == 1 then
require("luacov.runner").init()
end
local db = $ENV{TEST_LEDGE_REDIS_DATABASE}
local qless_db = $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE}
local master_name = '$ENV{TEST_LEDGE_SENTINEL_MASTER_NAME}'
local sentinel_port = $ENV{TEST_LEDGE_SENTINEL_PORT}
local redis_connector_params = {
url = "sentinel://" .. master_name .. ":s/" .. tostring(db),
sentinels = {
{ host = "127.0.0.1", port = sentinel_port },
},
}
require("ledge").configure({
redis_connector_params = redis_connector_params,
qless_db = $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE},
})
require("ledge").set_handler_defaults({
upstream_host = "127.0.0.1",
upstream_port = $ENV{TEST_NGINX_PORT},
storage_driver_config = {
redis_connector_params = redis_connector_params,
}
})
}
};

no_long_string();
run_tests();
__DATA__
=== TEST 1: Read from cache (primed in previous test file)
--- http_config eval: $::HttpConfig
--- config
location /sentinel_1_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /sentinel_1 {
echo "ORIGIN";
}
--- request
GET /sentinel_1_prx
--- response_body
OK
--- no_error_log
[error]
=== TEST 2: The write will fail, but we'll still get a 200 with our new content.
--- http_config eval: $::HttpConfig
--- config
location /sentinel_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /sentinel_2 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 2")
}
}
--- request
GET /sentinel_2_prx
--- response_body
TEST 2
--- error_log
READONLY You can't write against a read only slave.
=== TEST 2b: The write will fail, but we still get a 200 with our content.
--- http_config eval: $::HttpConfig
--- config
location /sentinel_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /sentinel_2 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 2b")
}
}
--- request
GET /sentinel_2_prx
--- response_body
TEST 2b
--- error_log
READONLY You can't write against a read only slave.
================================================
FILE: t/03-sentinel/03-slave_promoted.t
================================================
use Test::Nginx::Socket 'no_plan';
use Cwd qw(cwd);

# Environment defaults; each can be overridden externally to point the
# suite at a different nginx / Redis / Sentinel setup.
$ENV{TEST_NGINX_PORT} ||= 1984;
$ENV{TEST_LEDGE_REDIS_DATABASE} ||= 2;
$ENV{TEST_LEDGE_REDIS_QLESS_DATABASE} ||= 3;
$ENV{TEST_LEDGE_SENTINEL_MASTER_NAME} ||= 'mymaster';
$ENV{TEST_LEDGE_SENTINEL_PORT} ||= 6381;
$ENV{TEST_COVERAGE} ||= 0;

# Shared nginx http{} config: connects to Redis via Sentinel using the ":a"
# (any) role, so the promoted ex-slave is usable for both reads and writes.
# NOTE(review): the Lua local `qless_db` below is declared but unused (the
# configure() call reads the env var directly).
our $HttpConfig = qq{
lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;";
init_by_lua_block {
if $ENV{TEST_COVERAGE} == 1 then
require("luacov.runner").init()
end
local db = $ENV{TEST_LEDGE_REDIS_DATABASE}
local qless_db = $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE}
local master_name = '$ENV{TEST_LEDGE_SENTINEL_MASTER_NAME}'
local sentinel_port = $ENV{TEST_LEDGE_SENTINEL_PORT}
local redis_connector_params = {
url = "sentinel://" .. master_name .. ":a/" .. tostring(db),
sentinels = {
{ host = "127.0.0.1", port = sentinel_port },
},
}
require("ledge").configure({
redis_connector_params = redis_connector_params,
qless_db = $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE},
})
require("ledge").set_handler_defaults({
upstream_host = "127.0.0.1",
upstream_port = $ENV{TEST_NGINX_PORT},
storage_driver_config = {
redis_connector_params = redis_connector_params,
}
})
}
};

no_long_string();
run_tests();
__DATA__
=== TEST 1: Read from cache (primed in previous test file)
--- http_config eval: $::HttpConfig
--- config
location /sentinel_1 {
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /sentinel_1
--- response_body
OK
--- no_error_log
[error]
=== TEST 2: The write will succeed, as our slave has been promoted.
--- http_config eval: $::HttpConfig
--- config
location /sentinel_2_prx {
rewrite ^(.*)_prx$ $1 break;
content_by_lua_block {
require("ledge").create_handler():run()
}
}
location /sentinel_2 {
content_by_lua_block {
ngx.header["Cache-Control"] = "max-age=3600"
ngx.say("TEST 2")
}
}
--- request
GET /sentinel_2_prx
--- response_body
TEST 2
=== TEST 2b: Test for cache hit.
--- http_config eval: $::HttpConfig
--- config
location /sentinel_2 {
content_by_lua_block {
require("ledge").create_handler():run()
}
}
--- request
GET /sentinel_2
--- response_body
TEST 2
================================================
FILE: t/LedgeEnv.pm
================================================
package LedgeEnv;

use strict;
use warnings;

use Exporter;
# Inherit Exporter's import() so the @EXPORT list takes effect. Previously
# @EXPORT was declared without setting @ISA, so `use LedgeEnv` exported
# nothing and callers had to use the fully-qualified name.
our @ISA    = qw( Exporter );
our @EXPORT = qw( http_config );

# Test-environment settings, each overridable via a TEST_* env var.
our $nginx_host = $ENV{TEST_NGINX_HOST} || '127.0.0.1';
our $nginx_port = $ENV{TEST_NGINX_PORT} || 1984;
our $test_coverage = $ENV{TEST_COVERAGE} || 0;
our $redis_host = $ENV{TEST_LEDGE_REDIS_HOST} || '127.0.0.1';
our $redis_port = $ENV{TEST_LEDGE_REDIS_PORT} || 6379;
our $redis_database = $ENV{TEST_LEDGE_REDIS_DATABASE} || 2;
our $redis_qless_database = $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE} || 3;

# http_config(%args): build the shared nginx http{}-level configuration
# string used by the test files.
#
# Named args (all optional):
#   extra_nginx_config - raw nginx directives, placed first in the block
#   extra_lua_config   - extra Lua appended inside init_by_lua_block
#   run_worker         - truthy => also emit an init_worker_by_lua_block
#                        that starts a ledge background worker
sub http_config {
    my (%args) = @_;

    my $extra_nginx_config =
        defined $args{extra_nginx_config} ? $args{extra_nginx_config} : "";
    my $extra_lua_config =
        defined $args{extra_lua_config} ? $args{extra_lua_config} : "";

    my $worker_config = "";
    if ($args{run_worker}) {
        $worker_config = qq{
init_worker_by_lua_block {
require("ledge").create_worker():run()
}
};
    }

    return qq{
$extra_nginx_config
lua_package_path "./lib/?.lua;./extlib/?.lua;;";
resolver local=on ipv6=off;
init_by_lua_block {
if $test_coverage == 1 then
require("luacov.runner").init()
end
local REDIS_URL = "redis://$redis_host:$redis_port/$redis_database"
require("ledge").configure({
redis_connector_params = { url = REDIS_URL },
qless_db = $redis_qless_database,
})
require("ledge").set_handler_defaults({
upstream_host = "$nginx_host",
upstream_port = $nginx_port,
storage_driver_config = {
redis_connector_params = { url = REDIS_URL },
},
})
$extra_lua_config;
}
$worker_config
}
}

1;
================================================
FILE: t/cert/example.com.crt
================================================
-----BEGIN CERTIFICATE-----
MIIDZDCCAkwCCQC9pPAJEKdAJTANBgkqhkiG9w0BAQsFADCBiTELMAkGA1UEBhMC
VUsxDzANBgNVBAgMBkxvbmRvbjEPMA0GA1UEBwwGTG9uZG9uMREwDwYDVQQKDAhT
cXVpeiBVSzEQMA4GA1UECwwHSG9zdGluZzESMBAGA1UEAwwJRWRnZSBUZXN0MR8w
HQYJKoZIhvcNAQkBFhBlZGdlQHNxdWl6LmNvLnVrMCAXDTE5MTExMjIyNDA0MFoY
DzIxMTkxMDE5MjI0MDQwWjBcMQswCQYDVQQGEwJVSzETMBEGA1UECAwKU29tZS1T
dGF0ZTEPMA0GA1UEBwwGTG9uZG9uMREwDwYDVQQKDAhTcXVpeiBVSzEUMBIGA1UE
AwwLZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDr
jlyu9bPMemMkqfe0fi+HaLfXsYaMguJyzaOKIf11RAHG4Ptl3XHk4a6OrKR3MFuC
MnGmOuPOAJPRwGJ2PMNX6g3dI0UsEqMxdOEadJsaP/kcV22OmVTDpErdbADItk8h
tgCvo+QWUIIFCUMbd8t2nJpgusnhyfyhipwBiBTaiflANYFfINty8D39ohgHzg6j
tTiOLf2jBEurFJkekb8bu2kWcDxFv0lpR4VurMpvaguxuAM2XhpQVjHzqhJ28AlG
BJY8KV4OPZb3Qz+rZnojat3QKVoDIJc42pFRAUFTyanBF/m8ayZqOOZdH90t8bcj
dLEKMgUBHB9vnDDOOVfnAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAHbAH10eDzlq
B3QXMHidAwZhaYEwnfqFINSYThLir9o8/WtgiMvhOaD5BXZNvuoePsYjZZyYIx14
NF+xJe0Ijnh15RnAtNBDuw+NkGKVqtszdW1SNBkU9bH4rJXJYJOkHORkIRLRWlwl
/YR/YpXOwPSKCIgl9K8H3FsuAbjNB+sjUsaSsbyTKbOVUB67BvjDSb0e0UZwSBtc
9wWLAdHL2gZQD+tsX/vEv1F0QdaKsBEjMfYCuLfk0Ov51hKLXfyrIG08T7Csm9Mf
qvIku5Itl4AWNbGQZpXnbqtUHOF1OaWBe9i5xNoMtvv+WWqJcun9NQBsdMnpNMpE
MF3NccQR5iA=
-----END CERTIFICATE-----
================================================
FILE: t/cert/example.com.key
================================================
-----BEGIN RSA PRIVATE KEY-----
MIIEpQIBAAKCAQEA645crvWzzHpjJKn3tH4vh2i317GGjILics2jiiH9dUQBxuD7
Zd1x5OGujqykdzBbgjJxpjrjzgCT0cBidjzDV+oN3SNFLBKjMXThGnSbGj/5HFdt
jplUw6RK3WwAyLZPIbYAr6PkFlCCBQlDG3fLdpyaYLrJ4cn8oYqcAYgU2on5QDWB
XyDbcvA9/aIYB84Oo7U4ji39owRLqxSZHpG/G7tpFnA8Rb9JaUeFbqzKb2oLsbgD
Nl4aUFYx86oSdvAJRgSWPCleDj2W90M/q2Z6I2rd0ClaAyCXONqRUQFBU8mpwRf5
vGsmajjmXR/dLfG3I3SxCjIFARwfb5wwzjlX5wIDAQABAoIBADOdBgHwJG1xg7fM
5lHONGvfLik85NZ091lgZa0mtXq0ZA9HzM4NL5+PM8hfW8oh9msY0n4x+ShyR/F1
zh1KQyNITbFewRFfJBL6ITjCxBmEWvkyzvan8kLMBPtvZtyT1dL1JkFWD+wzx8mC
tgmWviZHOixnwUSQFaLv1C8hujAID82wkoSiEhCgl+B69JxHG9zwsTiKmBnt8iTb
URD38MPGxJkpGdzkWScb0nlSsbm8IZpPbEll0HU5UYjB0vkuv4tt5Ou23AqxTde0
psC1WZYa4qyomycATX9+PykBFEA0Qkd56/oHnFMZTmPQKUevqtq3axMOeUc6vqjJ
MEf5+8ECgYEA+MFDdhX5leLh0CxZf4PuHgaHTP9EITL9obU3Nh87fZYRlFVDjwpn
7ZMsAJw4f7uTXYH6j5e0oQ6KmCCk65Ak89/8roq+sbymm5AGLs07PGtUYKF1goq6
JqhMBslWPD8rkWMiUzRtlf7yQq6iA2gOQTecRKKfbjYT9vNFPqkB51sCgYEA8mqv
3Y4ZUX6cCkAwsA+TEOqKOh52VCgJ3xtrL7iUjJ2hsfT8avM1TsvklSGob4/LWqPQ
KN+EExaM3vULydxZ+WgSvOD73OyFSa7J25NdHdqFkyde2DC+f2aPa7zcRanuD1DD
h6e6/Z3OGa2pZq7Ed5cW7gAljSLQJZIFNN65A2UCgYEAo+VSOX+JDoSKG8rcvPOD
9CyBAO4/SVB7ZAwt8G7rl3dE5eK3vIsypomNOGm1oBNKqRV2rR1bWbJnBoybnMlA
T56IsceglSKi82QVbsix+sEMuw4ming05juEvAPz2YYVgpk6iG/GtElh/SVqgawR
mE63m1E6kjb3OIJYYUyhgHkCgYEAr9USmvFnC+V56TWGGy4wziRQ/rb5vTENd/a7
WHHZzeTIU/wO2sRt9imOM12mfsUeCzCm2/7EHdRNearkUhaybGVAsh++kBA+3aMa
Z1oMQIswN/xmnwk8I8yQWuUyIJWRRyqdqNfQmgTMaXO9W+2IM/Yze44/ro+Byr6P
aDnkmMECgYEAsHbqYEwD54neIKyNxlmYemybqqh63JVFp84VomJxMfHumVVW/UxQ
Pt7jKgbsvO7ayBzETtSpaQ3ajCiiKPKlrupqJDb9yXwSpdfbf6fRamQLDvVWJNkD
ZjLBiOUX5dS4Il0kEvUCeikgyFRssEFP6J5c2+Cbr5Gt12EnIQhu//I=
-----END RSA PRIVATE KEY-----
================================================
FILE: t/cert/rootCA.key
================================================
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAy6v3eQFGXrSe97M/9ullfby4bg2jF+9h9p5P/wiv+Y9dTdk8
DRR7M40laCqnmoXSSndAvHLb+lqeHs0GDkp+ofgBg8iP8p8IX0RQ33lqcrU65FJu
ImMksswkYZLak78YpR1wqF4EMpHH6t7SKkPlFgULkg+vTpml83fFdnq2WVk8UOSx
AsrllvVtxys8c8ny0hmBXqOWtISPQFYBNAy7N7uHLF8D/4/YMBHM2Vgp5oaMkvFX
nsDyL6JWvnqTnz1kKjL9lbYHbxP7oziI74MIRvRzsw8Ou+OfWiYKTRZDlxfl+7Jr
zOhxjbTpP/cOCw3ngBC6rrpdflDVuyf6r7rv2wIDAQABAoIBABtC1Uj5BrY+btiw
wWsHKnJ+BCGW6bGWdQJRhluYihVZPx/gZ81IZIUt60faDbz9FHyrIZsXtKH55xgw
URMwnWqIi4tcGQhciP5XYovG8JyR7WQKNHud0ZetA2GcCm2kMmRHYIDotJ8gLCYf
1PmbRNqBql7OgqR+pFvGOEP3gNjMf66m+A0jnSrXk2zLBGeo3hfEBpsWsm2fay5T
Z8r5PlUn8rXSBkVCK9AGGf2Ngg07Z+2wj08Htn+hq3+kTNf65D/buVpkg9kNqrxm
cDy1v182KBiM/jXjydoBt33GKxNkG4Ds6eH0W2hFzP8oOmYq4DCZ1BryUGhR+U6c
izj6/SECgYEA8iw2e0blUsd2Esi5iD/sDr3ZmQONEeWCTv9Twwvl0cu3h5BRjxH5
ypq/F1B1matjTFPxXY+PpVSj1dbYRGydyAfzaRwcox/ox/P+0h76zQeb3BMSnpmK
g7dpNCARJVW9NbtDam9Y6UBgNZU3giBBaE8pKlrwwnhRTcrlfLmQIk0CgYEA10z+
U/cXWh9SlGqz+XcRq4+Tp3oZ+gBVdartwJjjSLl2DXMOnxchhSw0pkCkAURvGEps
+mvopXn7fmpENOyXFN/TrnH09qY1exDl/xkzPvR5akCy0HkkAERtpYMfPfYzYYrI
G+W5olDMM5SmSU7rmKeQsTlVtNyavbx9+TB4XscCgYEAl20D6BONgzRLZTVzpXlq
zlDxxdbNl9otn93Rb016N7OtH6wjA1XXHlOilx5tWlgrb+exLbJ9vIBvLV/4vNg5
1ID8N8YnNezW7mhn9tT+N8PBNlwKsXcKgI/nzXsbnX++HuHoJp5XNwpU3kxeeBRZ
MbMF54ETuFXpaL4svs99C6UCgYAuw7d+T25QEfui5yZeakF5TT9aIkhgKBBn9Y+c
xNihZD9DHpmvbpvGTFrHPcUhzVaAJTJUlnm676rzw2s7P6R1UUSuYGw/4sw9Beef
KD8cTofMz27Hn3h1YmeaisePctmoNzfN73EJ05j3HzObOrwrtUHVbMmz9jLaQYXv
SVrr4wKBgQDfpZKufKQEX4q/L5OUs03hxRVEkSwtWwvIjXWFqbqvhkSEqB04+bns
3PhhsqVGa4nGmgua6f4/GPy2OAiniIbVupoyz/i3co8usixihH6U0/JhRJO6HHU2
te+2zL1GIHHBAA4fmBIaHXtBC7kta5Ck8RUJ2eSMQ4TFl091wvJxfg==
-----END RSA PRIVATE KEY-----
================================================
FILE: t/cert/rootCA.pem
================================================
-----BEGIN CERTIFICATE-----
MIIDkjCCAnoCCQCRNvMmzZMQezANBgkqhkiG9w0BAQsFADCBiTELMAkGA1UEBhMC
VUsxDzANBgNVBAgMBkxvbmRvbjEPMA0GA1UEBwwGTG9uZG9uMREwDwYDVQQKDAhT
cXVpeiBVSzEQMA4GA1UECwwHSG9zdGluZzESMBAGA1UEAwwJRWRnZSBUZXN0MR8w
HQYJKoZIhvcNAQkBFhBlZGdlQHNxdWl6LmNvLnVrMCAXDTE5MTExMjIyMzcxNloY
DzIxMTkxMDE5MjIzNzE2WjCBiTELMAkGA1UEBhMCVUsxDzANBgNVBAgMBkxvbmRv
bjEPMA0GA1UEBwwGTG9uZG9uMREwDwYDVQQKDAhTcXVpeiBVSzEQMA4GA1UECwwH
SG9zdGluZzESMBAGA1UEAwwJRWRnZSBUZXN0MR8wHQYJKoZIhvcNAQkBFhBlZGdl
QHNxdWl6LmNvLnVrMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy6v3
eQFGXrSe97M/9ullfby4bg2jF+9h9p5P/wiv+Y9dTdk8DRR7M40laCqnmoXSSndA
vHLb+lqeHs0GDkp+ofgBg8iP8p8IX0RQ33lqcrU65FJuImMksswkYZLak78YpR1w
qF4EMpHH6t7SKkPlFgULkg+vTpml83fFdnq2WVk8UOSxAsrllvVtxys8c8ny0hmB
XqOWtISPQFYBNAy7N7uHLF8D/4/YMBHM2Vgp5oaMkvFXnsDyL6JWvnqTnz1kKjL9
lbYHbxP7oziI74MIRvRzsw8Ou+OfWiYKTRZDlxfl+7JrzOhxjbTpP/cOCw3ngBC6
rrpdflDVuyf6r7rv2wIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQAuBBRjAQ/ZEwNa
UXkCZPc+3QxzWrKQCXcf8WoQYb73KaQKRd5gl8QQNWRkmiFdBngDwitd1xGVn0S3
d0jHQAvreWUztiv3fu/Uf/fGv0BWJw9ve9+Wuw4ENINR6rQbRpecXW9Ia/4Jep0w
pYFvNLBFUqPzukrdkf8UdCLyyl4H/gWENtgjgvURAxKCDJGkd3XiiBirT2837mNT
oRweVDY8gxECd+Os2OIDL4B6mon2m3oSEiJpL72bxsX0rwwc7dKdsuOrjuKyg+Jb
okTqY3oO5UzwEDVuKwuOOdvpO11LhtQ7SfjZiMQW2NAHeBtJqNgxcYIbJPeU1Zli
aSxKnsds
-----END CERTIFICATE-----
================================================
FILE: t/cert/rootCA.srl
================================================
BDA4F00910A74025
================================================
FILE: util/lua-releng
================================================
#!/usr/bin/env perl

use strict;
use warnings;

use Getopt::Std;

# Command line switches:
#   -L  skip the long-line (>80 chars) check
#   -s  silent mode (suppress informational output)
#   -e  die as soon as any check produces output
my %opts;
getopts('Lse', \%opts) or die "Usage: lua-releng [-L] [-s] [-e] [files]\n";

my $silent             = $opts{s};
my $stop_on_error      = $opts{e};
my $no_long_line_check = $opts{L};

# This tool parses luac's bytecode listing, so only Lua 5.1 is supported.
my $check_lua_ver = "luac -v | awk '{print\$2}'| grep 5.1";
my $output = `$check_lua_ver`;
die "ERROR: lua-releng ONLY supports Lua 5.1!\n" if $output eq '';

# Explicit file arguments win; otherwise scan the conventional tree layout.
my (@luas, @tests);
if (@ARGV) {
    @luas = @ARGV;
} else {
    @luas = map glob, qw{ *.lua lib/*.lua lib/*/*.lua lib/*/*/*.lua lib/*/*/*/*.lua lib/*/*/*/*/*.lua };
    @tests = map glob, qw{ t/*.t t/*/*.t t/*/*/*.t } if -d 't';
}

process_file($_) for sort @luas;

# Test files must not contain stray --- ONLY / --- LAST markers.
blank(qq{grep -H -n --color -E '\\--- ?(ONLY|LAST)' $_}) for @tests;
# p: prints a string to STDOUT appending \n
# w: prints a string to STDERR appending \n
# Both respect the $silent value
sub p { my ($msg) = @_; print "$msg\n" unless $silent; }
sub w { my ($msg) = @_; warn "$msg\n" unless $silent; }
# blank: runs a command and looks at the output. If the output is not
# blank it is printed (and the program dies if stop_on_error is 1)
sub blank {
    my ($cmd) = @_;

    unless ($stop_on_error) {
        # Best-effort mode: let the command write straight to our streams.
        system($cmd);
        return;
    }

    # Strict mode: capture the output and abort on anything non-blank.
    my $out = `$cmd`;
    die $out if $out ne '';
}
# First version string seen across the scanned files; used to verify that
# every .lua file declares the same version.
my $version;

# process_file: sanity-checks a single .lua file:
#   * reports its _VERSION/version field (warns if none is found),
#   * dies if two files declare different versions (unless marked SKIP),
#   * lists suspicious global variable accesses via luac,
#   * optionally flags lines longer than 80 characters.
sub process_file {
    my $file = shift;

    # Check the sanity of each .lua file
    open my $in, '<', $file
        or die "ERROR: Can't open $file for reading: $!\n";

    my $found_ver;
    while (<$in>) {
        if (/(?x) (?:_VERSION|version) \s* = .*? ([\d\.]*\d+) (.*? SKIP)?/) {
            my $ver = $1;
            my $orig_ver = $ver;
            my $skipping = $2;
            $found_ver = 1;

            # Normalise packed versions like 1.002003 to 1.2.3.
            $ver =~ s{^(\d+)\.(\d{3})(\d{3})$}{join '.', int($1), int($2), int($3)}e;
            w("$file: $orig_ver ($ver)");

            # Cross-file version consistency check. NOTE: this used to sit
            # after the `last` statements at the bottom of the loop, where
            # $ver was always undef, so it could never fire.
            if ($version and !$skipping) {
                if ($version ne $ver) {
                    die "$file: $ver != $version\n";
                }
            } elsif (!$version) {
                $version = $ver;
            }
            last;
        } elsif (/(?x) (?:_VERSION|version) \s* = \s* ([a-zA-Z_]\S*)/) {
            # Version is a symbolic expression (e.g. base._VERSION); just
            # report it, there is nothing numeric to compare.
            w("$file: $1");
            $found_ver = 1;
            last;
        }
    }

    if (!$found_ver) {
        w("WARNING: No \"_VERSION\" or \"version\" field found in `$file`.");
    }
    close $in;

    p("Checking use of Lua global variables in file $file...");
    p("\top no.\tline\tinstruction\targs\t; code");
    blank("luac -p -l $file | grep -E '[GS]ETGLOBAL' | grep -vE '\\<(require|type|tostring|error|ngx|ndk|jit|setmetatable|getmetatable|string|table|io|os|print|tonumber|math|pcall|xpcall|unpack|pairs|ipairs|assert|module|package|coroutine|[gs]etfenv|next|rawget|rawset|rawlen|select|loadstring)\\>'");

    unless ($no_long_line_check) {
        p("Checking line length exceeding 80...");
        blank("grep -H -n -E --color '.{81}' $file");
    }
}