changeset:   0:89e39cd5a7cd (default tip)
author:      Matthew Wild <mwild1@gmail.com>
date:        Thu, 07 Apr 2022 18:11:33 +0100
files:       main.lua util/dbuffer.lua util/queue.lua
description:
Initial commit

--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/main.lua	Thu Apr 07 18:11:33 2022 +0100
@@ -0,0 +1,125 @@
+#!/usr/bin/env lua5.3
+
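+-- Usage: ./main.lua <backup.ceb> <password>
+-- Decrypts the .ceb backup and replays the SQL it contains into a new
+-- <backup>.sqlite3 database next to the input file.
+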
+local ciphers = require "openssl.cipher";
+local kdf = require "openssl.kdf";
+local zlib = require "zlib";
+local sqlite = require "lsqlite3";
+
+local dbuffer = require "util.dbuffer";
+
+local file_password = assert(arg[2], "no password specified");
+
+local input_filename = assert(arg[1], "no ceb file specified");
+local db_filename = input_filename:gsub("%.ceb$", "")..".sqlite3";
+
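+-- The backup begins with a small binary header, read field by field below:
+-- a 4-byte big-endian version, two length-prefixed strings (a 2-byte
+-- big-endian length followed by that many bytes) for the app id and the
+-- account JID, an 8-byte timestamp in milliseconds, a 12-byte IV and a
+-- 16-byte salt.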
+local function read_header(f)
+	local function read_int()
+		return (">i4"):unpack(f:read(4));
+	end
+	local function read_short()
+		return (">i2"):unpack(f:read(2));
+	end
+	local function read_long()
+		return (">i8"):unpack(f:read(8));
+	end
+	local function read_string()
+		local n = read_short();
+		return f:read(n);
+	end
+
+	return {
+		version = read_int();
+		app_id = read_string();
+		jid = read_string();
+		timestamp = math.floor(read_long()/1000);
+		iv = f:read(12);
+		salt = f:read(16);
+	};
+end
+
+local f = assert(io.open(input_filename, "rb")); -- binary mode; fail early if the file cannot be opened
+
+local header = read_header(f);
+
+print("version", header.version);
+print("app", header.app_id);
+print("jid", header.jid);
+print("timestamp", os.date("%c", header.timestamp));
+
+
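+-- Derive the 16-byte AES-128 key from the backup password and the salt
+-- from the header, using PBKDF2-HMAC-SHA1 with 1024 iterations.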
+local function generate_key(password, salt)
+	return kdf.derive({
+		type = "PBKDF2";
+		md = "sha1";
+		pass = password;
+		salt = salt;
+		iter = 1024;
+		outlen = 128/8;
+	});
+end
+
+print("k", #(generate_key(file_password, header.salt)));
+
+local decryption_key = generate_key(file_password, header.salt);
+
+local cipher = ciphers.new("AES-128-GCM"):decrypt(decryption_key, header.iv);
+
+local decompress = zlib.inflate();
+
+local db = sqlite.open(db_filename);
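+-- Create the destination tables up front; the decrypted backup is then
+-- replayed into them as plain SQL statements.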
+do
+	local db_tables = {
+		[[create table accounts (uuid text primary key, username text, server text, password text, display_name text, status number, status_message text, rosterversion text, options number, avatar text, keys text, hostname text, port number, resource text)]];
+		[[create table conversations (uuid text, accountUuid text, name text, contactUuid text, contactJid text, created number, status number, mode number, attributes text)]];
+		[[create table messages (uuid text, conversationUuid text, timeSent number, counterpart text, trueCounterpart text, body text, encryption number, status number, type number, relativeFilePath text, serverMsgId text, axolotl_fingerprint text, carbon number, edited number, read number, oob number, errorMsg text, readByMarkers text, markable number, remoteMsgId text, deleted number, bodyLanguage text)]];
+		[[create table prekeys (account text, id text, key text)]];
+		[[create table signed_prekeys (account text, id text, key text)]];
+		[[create table sessions (account text, name text, device_id text, key text)]];
+		[[create table identities (account text, name text, ownkey text, fingerprint text, certificate text, trust number, active number, last_activation number, key text)]];
+	};
+
+	for _, query in ipairs(db_tables) do
+		db:exec(query);
+	end
+end
+
+local buffer = dbuffer.new(1024*1024, 128);
+
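+-- Main pipeline: read the encrypted file in 4KiB blocks, decrypt each block
+-- with AES-128-GCM, inflate the result and append it to `buffer`. Complete
+-- lines are then pulled out of the buffer; a line with an unbalanced number
+-- of single quotes opens (or closes) a multi-line string literal, so such
+-- lines are accumulated in `query_buffer` and executed as one statement.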
+repeat
+	local enc_data = f:read(4096);
+	if not enc_data then break; end
+
+	local gz_data = assert(cipher:update(enc_data));
+
+	local status, data, eof = pcall(decompress, gz_data);
+	if not status then
+		print("EE: Failed to decompress: "..tostring(data));
+		os.exit(1); -- abort with a non-zero exit status
+	end
+
+	assert(buffer:write(data), "line buffer overflow"); -- :write() returns nil if the 1MB cap would be exceeded
+
+	local line = buffer:read_until("\n");
+	local query_buffer;
+	while line do
+		local balanced_quotes = select(2, line:gsub("'", "%0")) % 2 == 0;
+
+		if query_buffer then
+			table.insert(query_buffer, line);
+			if not balanced_quotes then
+				db:exec(table.concat(query_buffer, "\n"));
+				query_buffer = nil;
+			end
+		else
+			if balanced_quotes then
+				db:exec(line);
+			else
+				query_buffer = { line };
+			end
+		end
+
+		line = buffer:read_until("\n");
+	end
+until eof
+
+print("Done");
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/util/dbuffer.lua	Thu Apr 07 18:11:33 2022 +0100
@@ -0,0 +1,213 @@
+local queue = require "util.queue";
+
+local s_byte, s_sub = string.byte, string.sub;
+local dbuffer_methods = {};
+local dynamic_buffer_mt = { __name = "dbuffer", __index = dbuffer_methods };
+
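+-- Append a chunk of data. Returns nil (rather than raising) when max_size
+-- would be exceeded, or when the chunk queue is still full after collapsing
+-- the existing chunks into one.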
+function dbuffer_methods:write(data)
+	if self.max_size and #data + self._length > self.max_size then
+		return nil;
+	end
+	local ok = self.items:push(data);
+	if not ok then
+		self:collapse();
+		ok = self.items:push(data);
+	end
+	if not ok then
+		return nil;
+	end
+	self._length = self._length + #data;
+	return true;
+end
+
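+-- Return up to requested_bytes from the front chunk only (the whole
+-- remainder of the front chunk if no count is given), advancing
+-- front_consumed or popping the chunk as appropriate.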
+function dbuffer_methods:read_chunk(requested_bytes)
+	local chunk, consumed = self.items:peek(), self.front_consumed;
+	if not chunk then return; end
+	local chunk_length = #chunk;
+	local remaining_chunk_length = chunk_length - consumed;
+	if not requested_bytes then
+		requested_bytes = remaining_chunk_length;
+	end
+	if remaining_chunk_length <= requested_bytes then
+		self.front_consumed = 0;
+		self._length = self._length - remaining_chunk_length;
+		self.items:pop();
+		assert(#chunk:sub(consumed + 1, -1) == remaining_chunk_length);
+		return chunk:sub(consumed + 1, -1), remaining_chunk_length;
+	end
+	local end_pos = consumed + requested_bytes;
+	self.front_consumed = end_pos;
+	self._length = self._length - requested_bytes;
+	assert(#chunk:sub(consumed + 1, end_pos) == requested_bytes);
+	return chunk:sub(consumed + 1, end_pos), requested_bytes;
+end
+
+function dbuffer_methods:read(requested_bytes)
+	local chunks;
+
+	if requested_bytes and requested_bytes > self._length then
+		return nil;
+	end
+
+	local chunk, read_bytes = self:read_chunk(requested_bytes);
+	if not requested_bytes then
+		return chunk;
+	elseif chunk then
+		requested_bytes = requested_bytes - read_bytes;
+		if requested_bytes == 0 then -- Already read everything we need
+			return chunk;
+		end
+		chunks = {};
+	else
+		return nil;
+	end
+
+	-- Need to keep reading more chunks
+	while chunk do
+		table.insert(chunks, chunk);
+		if requested_bytes > 0 then
+			chunk, read_bytes = self:read_chunk(requested_bytes);
+			requested_bytes = requested_bytes - read_bytes;
+		else
+			break;
+		end
+	end
+
+	return table.concat(chunks);
+end
+
+-- Read up to and including the specified character sequence (returns nil if it is not found)
+function dbuffer_methods:read_until(char)
+	local buffer_pos = 0;
+	for i, chunk in self.items:items() do
+		local start = 1 + ((i == 1) and self.front_consumed or 0);
+		local char_pos = chunk:find(char, start, true);
+		if char_pos then
+			return self:read(1 + buffer_pos + char_pos - start);
+		end
+		buffer_pos = buffer_pos + #chunk - (start - 1);
+	end
+	return nil;
+end
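+
+-- A minimal usage sketch (illustrative only; `new` is the constructor exported below):
+--   local buf = new();
+--   buf:write("hello\nwor"); buf:write("ld\n");
+--   print(buf:read_until("\n")) --> "hello\n"
+--   print(buf:read_until("\n")) --> "world\n"
+--   print(buf:read_until("\n")) --> nil (no complete line buffered yet)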
+
+function dbuffer_methods:discard(requested_bytes)
+	if requested_bytes > self._length then
+		return nil;
+	end
+
+	local chunk, read_bytes = self:read_chunk(requested_bytes);
+	if chunk then
+		requested_bytes = requested_bytes - read_bytes;
+		if requested_bytes == 0 then -- Already read everything we need
+			return true;
+		end
+	else
+		return nil;
+	end
+
+	while chunk do
+		if requested_bytes > 0 then
+			chunk, read_bytes = self:read_chunk(requested_bytes);
+			requested_bytes = requested_bytes - read_bytes;
+		else
+			break;
+		end
+	end
+	return true;
+end
+
+-- Normalize i, j into absolute offsets within the
+-- front chunk (accounting for front_consumed), and
+-- ensure there is enough data in the first chunk
+-- to cover any subsequent :sub() or :byte() operation
+function dbuffer_methods:_prep_sub(i, j)
+	if j == nil then
+		j = -1;
+	end
+	if j < 0 then
+		j = self._length + (j+1);
+	end
+	if i < 0 then
+		i = self._length + (i+1);
+	end
+	if i < 1 then
+		i = 1;
+	end
+	if j > self._length then
+		j = self._length;
+	end
+	if i > j then
+		return nil;
+	end
+
+	self:collapse(j);
+
+	if self.front_consumed > 0 then
+		i = i + self.front_consumed;
+		j = j + self.front_consumed;
+	end
+
+	return i, j;
+end
+
+function dbuffer_methods:sub(i, j)
+	i, j = self:_prep_sub(i, j);
+	if not i then
+		return "";
+	end
+	return s_sub(self.items:peek(), i, j);
+end
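+
+-- e.g. with "hello" buffered: :sub(2, 3) == "el" and :sub(-3) == "llo",
+-- mirroring string.sub semantics.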
+
+function dbuffer_methods:byte(i, j)
+	i = i or 1;
+	j = j or i;
+	i, j = self:_prep_sub(i, j);
+	if not i then
+		return;
+	end
+	return s_byte(self.items:peek(), i, j);
+end
+
+function dbuffer_methods:length()
+	return self._length;
+end
+dbuffer_methods.len = dbuffer_methods.length; -- strings have :len()
+dynamic_buffer_mt.__len = dbuffer_methods.length; -- support # operator
+
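+-- Merge chunks at the front of the queue into a single string until at
+-- least `bytes` bytes (by default, everything buffered) are contiguous.
+-- Used by :sub()/:byte() and when the chunk queue fills up.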
+function dbuffer_methods:collapse(bytes)
+	bytes = bytes or self._length;
+
+	local front_chunk = self.items:peek();
+
+	if not front_chunk or #front_chunk - self.front_consumed >= bytes then
+		return;
+	end
+
+	local front_chunks = { front_chunk:sub(self.front_consumed+1) };
+	local front_bytes = #front_chunks[1];
+
+	while front_bytes < bytes do
+		self.items:pop();
+		local chunk = self.items:peek();
+		front_bytes = front_bytes + #chunk;
+		table.insert(front_chunks, chunk);
+	end
+	self.items:replace(table.concat(front_chunks));
+	self.front_consumed = 0;
+end
+
+local function new(max_size, max_chunks)
+	if max_size and max_size <= 0 then
+		return nil;
+	end
+	return setmetatable({
+		front_consumed = 0;
+		_length = 0;
+		max_size = max_size;
+		items = queue.new(max_chunks or 32);
+	}, dynamic_buffer_mt);
+end
+
+return {
+	new = new;
+};
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/util/queue.lua	Thu Apr 07 18:11:33 2022 +0100
@@ -0,0 +1,82 @@
+-- Prosody IM
+-- Copyright (C) 2008-2015 Matthew Wild
+-- Copyright (C) 2008-2015 Waqas Hussain
+--
+-- This project is MIT/X11 licensed. Please see the
+-- COPYING file in the source package for more information.
+--
+
+-- Small ringbuffer library (i.e. an efficient FIFO queue with a size limit)
+-- (because unbounded dynamically-growing queues are a bad thing...)
+
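+-- A minimal usage sketch (illustrative only; `new` is the constructor exported below):
+--   local q = new(3);
+--   q:push("a"); q:push("b"); q:push("c");
+--   print(q:push("d")) --> nil  "queue full"  (wrapping not enabled)
+--   print(q:pop())     --> "a"  (oldest item first)
+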
+local have_utable, utable = pcall(require, "util.table"); -- For pre-allocation of table
+
+local function new(size, allow_wrapping)
+	-- Head is next insert, tail is next read
+	local head, tail = 1, 1;
+	local items = 0; -- Number of stored items
+	local t = have_utable and utable.create(size, 0) or {}; -- Table to hold items
+	--luacheck: ignore 212/self
+	return {
+		_items = t;
+		size = size;
+		count = function (self) return items; end;
+		push = function (self, item)
+			if items >= size then
+				if allow_wrapping then
+					tail = (tail%size)+1; -- Advance to next oldest item
+					items = items - 1;
+				else
+					return nil, "queue full";
+				end
+			end
+			t[head] = item;
+			items = items + 1;
+			head = (head%size)+1;
+			return true;
+		end;
+		pop = function (self)
+			if items == 0 then
+				return nil;
+			end
+			local item;
+			item, t[tail] = t[tail], 0;
+			tail = (tail%size)+1;
+			items = items - 1;
+			return item;
+		end;
+		peek = function (self)
+			if items == 0 then
+				return nil;
+			end
+			return t[tail];
+		end;
+		replace = function (self, data)
+			if items == 0 then
+				return self:push(data);
+			end
+			t[tail] = data;
+			return true;
+		end;
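+		-- Iterate over queued items from oldest to newest without removing them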
+		items = function (self)
+			return function (_, pos)
+				if pos >= items then
+					return nil;
+				end
+				local read_pos = tail + pos;
+				if read_pos > self.size then
+					read_pos = (read_pos%size);
+				end
+				return pos+1, t[read_pos];
+			end, self, 0;
+		end;
+		consume = function (self)
+			return self.pop, self;
+		end;
+	};
+end
+
+return {
+	new = new;
+};
+
