Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
89 changes: 88 additions & 1 deletion include/leveled.hrl
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
-include_lib("kernel/include/logger.hrl").

%%%============================================================================
%%% File paths
%%%============================================================================
Expand Down Expand Up @@ -81,11 +83,96 @@
-define(EQC_TIME_BUDGET, 120).

%%%============================================================================
%%% Helper Function
%%% Helper Functions
%%%============================================================================

%% True when Attribute has been set, i.e. is not the atom 'undefined'.
%% Intended for use in guard expressions (uses only =/=, guard-safe).
-define(IS_DEF(Attribute), Attribute =/= undefined).

%% Location metadata map for logger events, mirroring the metadata that
%% the standard ?LOG_* macros in kernel/include/logger.hrl capture.
%% Because this is a macro, ?MODULE/?FUNCTION_NAME/?FUNCTION_ARITY/
%% ?LINE/?FILE expand at the point of use, so the recorded location is
%% that of the ?STD_LOG/?TMR_LOG call site, not this header.
-define(LOG_LOCATION, #{
mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY},
line => ?LINE,
file => ?FILE
}).

%% Standard log entry for reference LogRef with substitution list Subs.
%% Resolves the configured level and options for LogRef via leveled_log,
%% then delegates to ?STD_LOG_INT, which performs the actual filtering
%% and emission.
-define(STD_LOG(LogRef, Subs),
?STD_LOG_INT(
leveled_log:get_loglevel(LogRef),
LogRef,
Subs,
leveled_log:get_opts()
)
).

%% Internal implementation of ?STD_LOG.
%% Emission is double-gated: logger:allow/2 (the logger application's
%% cheap per-module/level check) andalso leveled_log:should_i_log/3
%% (leveled's own per-reference filtering); otherwise evaluates to ok.
%% Erlang apply is used because a variable list of arguments is provided
%% by leveled_log:log/4; ?LOG_LOCATION is prepended to that list so
%% logger:macro_log receives call-site location metadata, as the
%% standard ?LOG_* macros would supply it.
-define(STD_LOG_INT(LogLevel, LogRef, Subs, LogOpts),
case
logger:allow(LogLevel, ?MODULE) andalso
leveled_log:should_i_log(LogLevel, LogRef, LogOpts)
of
true ->
erlang:apply(
logger,
macro_log,
[
?LOG_LOCATION
| leveled_log:log(LogLevel, LogRef, LogOpts, Subs)
]
);
false ->
ok
end
).

%% Randomly-sampled timed log: emits a timer log (via ?TMR_LOG_INT,
%% measuring elapsed time since StartTime) only with probability
%% RandomProb, using rand:uniform/0 for the sample; otherwise evaluates
%% to ok.  NOTE(review): presumably used to bound the volume of
%% high-frequency timing logs - confirm against call sites.
-define(RND_LOG(LogRef, Subs, StartTime, RandomProb),
case rand:uniform() < RandomProb of
true ->
?TMR_LOG_INT(
leveled_log:get_loglevel(LogRef),
LogRef,
Subs,
leveled_log:get_opts(),
StartTime
);
false ->
ok
end
).

%% Timed log entry for reference LogRef: like ?STD_LOG but the emitted
%% event also reports time elapsed since StartTime.  Resolves level and
%% options for LogRef via leveled_log, then delegates to ?TMR_LOG_INT.
-define(TMR_LOG(LogRef, Subs, StartTime),
?TMR_LOG_INT(
leveled_log:get_loglevel(LogRef),
LogRef,
Subs,
leveled_log:get_opts(),
StartTime
)
).

%% Internal implementation of ?TMR_LOG / ?RND_LOG.
%% Same double gating as ?STD_LOG_INT (logger:allow/2 andalso
%% leveled_log:should_i_log/3), but builds the event arguments with
%% leveled_log:log_timer/5, which additionally takes StartTime so the
%% emitted log can include the elapsed duration.  erlang:apply is used
%% because log_timer returns a variable-length argument list, with
%% ?LOG_LOCATION prepended for call-site metadata.  Evaluates to ok
%% when filtered out.
-define(TMR_LOG_INT(LogLevel, LogRef, Subs, LogOpts, StartTime),
case
logger:allow(LogLevel, ?MODULE) andalso
leveled_log:should_i_log(LogLevel, LogRef, LogOpts)
of
true ->
erlang:apply(
logger,
macro_log,
[
?LOG_LOCATION
| leveled_log:log_timer(
LogLevel,
LogRef,
LogOpts,
Subs,
StartTime
)
]
);
false ->
ok
end
).

%% Compatibility shim: the guard implies dynamic() is predefined from
%% OTP 26 onward, so on older releases alias it to any() so that specs
%% written in terms of dynamic() still compile.
-if(?OTP_RELEASE < 26).
-type dynamic() :: any().
-endif.
Expand Down
22 changes: 11 additions & 11 deletions src/leveled_bookie.erl
Original file line number Diff line number Diff line change
Expand Up @@ -787,7 +787,7 @@ book_indexfold(Pid, Bucket, FoldAccT, Range, TermHandling) ->
% future release this code branch may be removed, and such queries may
% instead return `error`. For now null is assumed to be lower than any
% key
leveled_log:log(b0019, [Bucket]),
?STD_LOG(b0019, [Bucket]),
book_indexfold(Pid, {Bucket, null}, FoldAccT, Range, TermHandling).

-type query() ::
Expand Down Expand Up @@ -1377,7 +1377,7 @@ init([Opts]) ->
% and performance may be unpredictable
case CacheRatio > 32 of
true ->
leveled_log:log(b0020, [PCLMaxSize, ConfiguredCacheSize]);
?STD_LOG(b0020, [PCLMaxSize, ConfiguredCacheSize]);
false ->
ok
end,
Expand Down Expand Up @@ -1406,7 +1406,7 @@ init([Opts]) ->
{Inker, Penciller} = startup(InkerOpts, PencillerOpts0),

NewETS = ets:new(mem, [ordered_set]),
leveled_log:log(b0001, [Inker, Penciller]),
?STD_LOG(b0001, [Inker, Penciller]),
{ok, #state{
cache_size = CacheSize,
cache_multiple = MaxCacheMultiple,
Expand All @@ -1424,7 +1424,7 @@ init([Opts]) ->
BookieMonitor = erlang:monitor(process, Bookie),
NewETS = ets:new(mem, [ordered_set]),
{HeadOnly, Lookup} = leveled_bookie:book_headstatus(Bookie),
leveled_log:log(b0002, [Inker, Penciller]),
?STD_LOG(b0002, [Inker, Penciller]),
{ok, #state{
penciller = Penciller,
inker = Inker,
Expand Down Expand Up @@ -1729,7 +1729,7 @@ handle_call(
handle_call(destroy, _From, State = #state{is_snapshot = Snp}) when
Snp == false
->
leveled_log:log(b0011, []),
?STD_LOG(b0011, []),
{ok, InkPathList} = leveled_inker:ink_doom(State#state.inker),
{ok, PCLPathList} = leveled_penciller:pcl_doom(State#state.penciller),
leveled_monitor:monitor_close(element(1, State#state.monitor)),
Expand Down Expand Up @@ -1794,13 +1794,13 @@ handle_info(
{'DOWN', BookieMonRef, process, BookiePid, Info},
State = #state{bookie_monref = BookieMonRef, is_snapshot = true}
) ->
leveled_log:log(b0004, [BookiePid, Info]),
?STD_LOG(b0004, [BookiePid, Info]),
{stop, normal, State};
handle_info(_Info, State) ->
{noreply, State}.

terminate(Reason, _State) ->
leveled_log:log(b0003, [Reason]).
?STD_LOG(b0003, [Reason]).

code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Expand Down Expand Up @@ -1854,7 +1854,7 @@ push_to_penciller(Penciller, LoadItemList, LedgerCache, ReloadStrategy) ->
),
case length(UpdLedgerCache#ledger_cache.load_queue) of
N when N > ?LOADING_BATCH ->
leveled_log:log(b0006, [UpdLedgerCache#ledger_cache.max_sqn]),
?STD_LOG(b0006, [UpdLedgerCache#ledger_cache.max_sqn]),
ok =
push_to_penciller_loop(
Penciller, loadqueue_ledgercache(UpdLedgerCache)
Expand Down Expand Up @@ -1990,7 +1990,7 @@ startup(InkerOpts, PencillerOpts) ->
{ok, Inker} = leveled_inker:ink_start(InkerOpts),
{ok, Penciller} = leveled_penciller:pcl_start(PencillerOpts),
LedgerSQN = leveled_penciller:pcl_getstartupsequencenumber(Penciller),
leveled_log:log(b0005, [LedgerSQN]),
?STD_LOG(b0005, [LedgerSQN]),
ReloadStrategy = InkerOpts#inker_options.reload_strategy,
LoadFun = get_loadfun(),
BatchFun =
Expand All @@ -2001,7 +2001,7 @@ startup(InkerOpts, PencillerOpts) ->
end,
InitAccFun =
fun(FN, CurrentMinSQN) ->
leveled_log:log(i0014, [FN, CurrentMinSQN]),
?STD_LOG(i0014, [FN, CurrentMinSQN]),
[]
end,
FinalAcc =
Expand Down Expand Up @@ -2525,7 +2525,7 @@ return_ledger_keyrange(Tag, Bucket, KeyRange) ->
maybe_longrunning(SW, Aspect) ->
case timer:now_diff(os:timestamp(), SW) of
N when N > ?LONG_RUNNING ->
leveled_log:log(b0013, [N, Aspect]);
?STD_LOG(b0013, [N, Aspect]);
_ ->
ok
end.
Expand Down
44 changes: 22 additions & 22 deletions src/leveled_cdb.erl
Original file line number Diff line number Diff line change
Expand Up @@ -476,10 +476,10 @@ callback_mode() ->

starting({call, From}, {open_writer, Filename}, State) ->
leveled_log:save(State#state.log_options),
leveled_log:log(cdb01, [Filename]),
?STD_LOG(cdb01, [Filename]),
{LastPosition, HashTree, LastKey} = open_active_file(Filename),
{WriteOps, UpdStrategy} = set_writeops(State#state.sync_strategy),
leveled_log:log(cdb13, [WriteOps]),
?STD_LOG(cdb13, [WriteOps]),
{ok, Handle} = file:open(Filename, WriteOps),
State0 = State#state{
handle = Handle,
Expand All @@ -493,7 +493,7 @@ starting({call, From}, {open_writer, Filename}, State) ->
{next_state, writer, State0, [{reply, From, ok}, hibernate]};
starting({call, From}, {open_reader, Filename}, State) ->
leveled_log:save(State#state.log_options),
leveled_log:log(cdb02, [Filename]),
?STD_LOG(cdb02, [Filename]),
{Handle, Index, LastKey} = open_for_readonly(Filename, false),
State0 = State#state{
handle = Handle,
Expand All @@ -504,7 +504,7 @@ starting({call, From}, {open_reader, Filename}, State) ->
{next_state, reader, State0, [{reply, From, ok}, hibernate]};
starting({call, From}, {open_reader, Filename, LastKey}, State) ->
leveled_log:save(State#state.log_options),
leveled_log:log(cdb02, [Filename]),
?STD_LOG(cdb02, [Filename]),
{Handle, Index, LastKey} = open_for_readonly(Filename, LastKey),
State0 = State#state{
handle = Handle,
Expand Down Expand Up @@ -706,7 +706,7 @@ rolling(
ok = write_top_index_table(Handle, BasePos, IndexList),
file:close(Handle),
ok = rename_for_read(FN, NewName),
leveled_log:log(cdb03, [NewName]),
?STD_LOG(cdb03, [NewName]),
ets:delete(State#state.hashtree),
{NewHandle, Index, LastKey} =
open_for_readonly(NewName, State#state.last_key),
Expand All @@ -720,7 +720,7 @@ rolling(
true ->
{next_state, delete_pending, State0, [{reply, From, ok}]};
false ->
leveled_log:log_timer(cdb18, [], SW),
?TMR_LOG(cdb18, [], SW),
{next_state, reader, State0, [{reply, From, ok}, hibernate]}
end;
rolling({call, From}, check_hashtable, _State) ->
Expand Down Expand Up @@ -819,7 +819,7 @@ reader(
) when
?IS_DEF(FN), ?IS_DEF(IO)
->
leveled_log:log(cdb05, [FN, reader, cdb_ccomplete]),
?STD_LOG(cdb05, [FN, reader, cdb_ccomplete]),
ok = file:close(IO),
{stop_and_reply, normal, [{reply, From, {ok, FN}}], State#state{
handle = undefined
Expand Down Expand Up @@ -870,7 +870,7 @@ delete_pending(
) when
?IS_DEF(FN), ?IS_DEF(IO)
->
leveled_log:log(cdb05, [FN, delete_pending, cdb_close]),
?STD_LOG(cdb05, [FN, delete_pending, cdb_close]),
close_pendingdelete(IO, FN, State#state.waste_path),
{stop_and_reply, normal, [{reply, From, ok}]};
delete_pending({call, From}, Event, State) ->
Expand All @@ -880,15 +880,15 @@ delete_pending(
) when
?IS_DEF(FN), ?IS_DEF(IO)
->
leveled_log:log(cdb04, [FN, State#state.delete_point]),
?STD_LOG(cdb04, [FN, State#state.delete_point]),
close_pendingdelete(IO, FN, State#state.waste_path),
{stop, normal};
delete_pending(
cast, destroy, State = #state{handle = IO, filename = FN}
) when
?IS_DEF(FN), ?IS_DEF(IO)
->
leveled_log:log(cdb05, [FN, delete_pending, destroy]),
?STD_LOG(cdb05, [FN, delete_pending, destroy]),
close_pendingdelete(IO, FN, State#state.waste_path),
{stop, normal};
delete_pending(
Expand All @@ -906,7 +906,7 @@ delete_pending(
),
{keep_state_and_data, [?DELETE_TIMEOUT]};
false ->
leveled_log:log(cdb04, [FN, ManSQN]),
?STD_LOG(cdb04, [FN, ManSQN]),
close_pendingdelete(IO, FN, State#state.waste_path),
{stop, normal}
end.
Expand Down Expand Up @@ -1058,7 +1058,7 @@ close_pendingdelete(Handle, Filename, WasteFP) ->
false ->
% This may happen when there has been a destroy while files are
% still pending deletion
leveled_log:log(cdb21, [Filename])
?STD_LOG(cdb21, [Filename])
end.

-spec set_writeops(sync | riak_sync | none) ->
Expand Down Expand Up @@ -1101,7 +1101,7 @@ open_active_file(FileName) when is_list(FileName) ->
{?BASE_POSITION, 0} ->
ok;
_ ->
leveled_log:log(cdb06, [LastPosition, EndPosition])
?STD_LOG(cdb06, [LastPosition, EndPosition])
end,
{ok, _LastPosition} = file:position(Handle, LastPosition),
ok = file:truncate(Handle),
Expand Down Expand Up @@ -1280,7 +1280,7 @@ hashtable_calc(HashTree, StartPos) ->
Seq = lists:seq(0, 255),
SWC = os:timestamp(),
{IndexList, HashTreeBin} = write_hash_tables(Seq, HashTree, StartPos),
leveled_log:log_timer(cdb07, [], SWC),
?TMR_LOG(cdb07, [], SWC),
{IndexList, HashTreeBin}.

%%%%%%%%%%%%%%%%%%%%
Expand All @@ -1292,7 +1292,7 @@ determine_new_filename(Filename) ->

rename_for_read(Filename, NewName) ->
%% Rename file
leveled_log:log(cdb08, [Filename, NewName, filelib:is_file(NewName)]),
?STD_LOG(cdb08, [Filename, NewName, filelib:is_file(NewName)]),
file:rename(Filename, NewName).

-spec open_for_readonly(string(), term()) ->
Expand Down Expand Up @@ -1487,7 +1487,7 @@ scan_over_file(Handle, Position, FilterFun, Output, LastKey) ->
% Not interesting that we've nothing to read at base
ok;
_ ->
leveled_log:log(cdb09, [Position])
?STD_LOG(cdb09, [Position])
end,
% Bring file back to that position
{ok, Position} = file:position(Handle, {bof, Position}),
Expand Down Expand Up @@ -1590,7 +1590,7 @@ safe_read_next(Handle, Length, ReadType) ->
end
catch
error:ReadError ->
leveled_log:log(cdb20, [ReadError, Length]),
?STD_LOG(cdb20, [ReadError, Length]),
false
end.

Expand All @@ -1604,11 +1604,11 @@ crccheck(<<CRC:32/integer, Value/binary>>, KeyBin) when is_binary(KeyBin) ->
CRC ->
Value;
_ ->
leveled_log:log(cdb10, ["mismatch"]),
?STD_LOG(cdb10, ["mismatch"]),
false
end;
crccheck(_V, _KB) ->
leveled_log:log(cdb10, ["size"]),
?STD_LOG(cdb10, ["size"]),
false.

-spec calc_crc(binary(), binary()) -> integer().
Expand Down Expand Up @@ -1710,7 +1710,7 @@ search_hash_table(
end,
case KV of
missing ->
leveled_log:log(cdb15, [Hash]),
?STD_LOG(cdb15, [Hash]),
search_hash_table(
Handle,
{FirstHashPosition, Slot, CycleCount + 1, TotalSlots},
Expand Down Expand Up @@ -1765,7 +1765,7 @@ perform_write_hash_tables(Handle, HashTreeBin, StartPos) ->
ok = file:write(Handle, HashTreeBin),
{ok, EndPos} = file:position(Handle, cur),
ok = file:advise(Handle, StartPos, EndPos - StartPos, will_need),
leveled_log:log_timer(cdb12, [], SWW),
?TMR_LOG(cdb12, [], SWW),
ok.

%% Write the top most 255 doubleword entries. First word is the
Expand Down Expand Up @@ -1952,7 +1952,7 @@ write_hash_tables(
HT_BinList,
{T1, T2, T3}
) ->
leveled_log:log(cdb14, [T1, T2, T3]),
?STD_LOG(cdb14, [T1, T2, T3]),
IL = lists:reverse(IndexList),
{IL, list_to_binary(lists:reverse(HT_BinList))};
write_hash_tables(
Expand Down
2 changes: 1 addition & 1 deletion src/leveled_codec.erl
Original file line number Diff line number Diff line change
Expand Up @@ -626,7 +626,7 @@ get_tagstrategy(Tag, Strategy) ->
%% running in head_only mode - so don't warn
retain;
false ->
leveled_log:log(ic012, [Tag, Strategy]),
?STD_LOG(ic012, [Tag, Strategy]),
retain
end.

Expand Down
Loading