Commit ce3351b7, authored Jun 05, 2019 by AdminXVII

Merge branch 'feature/complete_bugs' into 'master'

Feature/complete bugs

See merge request redox-os/liner!12

Parents: ede738c0, 32059cc3
Pipeline #4444 passed with stage in 2 minutes and 7 seconds
Changes: 5 | Pipelines: 1

Showing 5 changed files with 366 additions and 86 deletions (+366 -86)
src/buffer.rs    +32  -16
src/complete.rs  +24  -2
src/editor.rs    +56  -14
src/history.rs   +221 -28
src/test.rs      +33  -26
src/buffer.rs

-use unicode_width::UnicodeWidthStr;
-use std::fmt::{self, Write as FmtWrite};
 use std::io::{self, Write};
 use std::iter::FromIterator;
+use std::fmt::{self, Write as FmtWrite};
+use unicode_width::UnicodeWidthStr;
 
 /// A modification performed on a `Buffer`. These are used for the purpose of undo/redo.
-#[derive(Debug,Clone)]
+#[derive(Debug, Clone)]
 pub enum Action {
     Insert { start: usize, text: Vec<char> },
     Remove { start: usize, text: Vec<char> },
@@ -44,6 +44,13 @@ pub struct Buffer {
     undone_actions: Vec<Action>,
 }
 
+impl PartialEq for Buffer {
+    fn eq(&self, other: &Self) -> bool {
+        self.data == other.data
+    }
+}
+
+impl Eq for Buffer {}
+
 impl From<Buffer> for String {
     fn from(buf: Buffer) -> Self {
         String::from_iter(buf.data)
@@ -172,7 +179,10 @@ impl Buffer {
     }
 
     pub fn last_arg(&self) -> Option<&[char]> {
-        self.data.split(|&c| c == ' ').filter(|s| !s.is_empty()).last()
+        self.data
+            .split(|&c| c == ' ')
+            .filter(|s| !s.is_empty())
+            .last()
     }
 
     pub fn num_chars(&self) -> usize {
@@ -242,11 +252,17 @@ impl Buffer {
     }
 
     pub fn range_width(&self, start: usize, end: usize) -> Vec<usize> {
-        self.range(start, end).split('\n').map(|s| s.width()).collect()
+        self.range(start, end)
+            .split('\n')
+            .map(|s| s.width())
+            .collect()
     }
 
     pub fn lines(&self) -> Vec<String> {
-        self.data.split(|&c| c == '\n').map(|s| s.iter().cloned().collect()).collect()
+        self.data
+            .split(|&c| c == '\n')
+            .map(|s| s.iter().cloned().collect())
+            .collect()
     }
 
     pub fn chars(&self) -> ::std::slice::Iter<char> {
@@ -259,7 +275,8 @@ impl Buffer {
     }
 
     pub fn print<W>(&self, out: &mut W) -> io::Result<()>
-        where W: Write
+    where
+        W: Write,
     {
         let string: String = self.data.iter().cloned().collect();
         out.write_all(string.as_bytes())
@@ -275,7 +292,8 @@ impl Buffer {
     /// the other stopped.
     /// Used to implement autosuggestions.
     pub fn print_rest<W>(&self, out: &mut W, after: usize) -> io::Result<usize>
-        where W: Write
+    where
+        W: Write,
     {
         let string: String = self.data.iter().skip(after).cloned().collect();
         out.write_all(string.as_bytes())?;
@@ -293,18 +311,14 @@ impl Buffer {
         }
     }
 
-    /// Check if the other buffer has the same content as this one.
-    pub fn equals(&self, other: &Buffer) -> bool {
-        self.data == other.data
-    }
-
     /// Check if the other buffer starts with the same content as this one.
     /// Used to implement autosuggestions.
     pub fn starts_with(&self, other: &Buffer) -> bool {
         let other_len = other.data.len();
         let self_len = self.data.len();
 
         if !other.data.is_empty() && self_len != other_len {
-            let match_let = self.data
+            let match_let = self
+                .data
                 .iter()
                 .zip(&other.data)
                 .take_while(|&(s, o)| *s == *o)
@@ -322,7 +336,9 @@ impl Buffer {
         if search_term.is_empty() {
             return false;
         }
 
-        self.data.windows(search_term.len()).any(|window| window == search_term)
+        self.data
+            .windows(search_term.len())
+            .any(|window| window == search_term)
     }
 
     /// Return true if the buffer is empty.
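Taken together, the buffer changes swap the ad-hoc `equals` method for standard `PartialEq`/`Eq` impls and add `From<Buffer> for String`. A minimal usage sketch (not part of the commit; it assumes `Buffer` is re-exported at the crate root and relies on the existing `From<&str>` conversion used elsewhere in this diff):

    use liner::Buffer;

    fn main() {
        let a = Buffer::from("cargo build");
        let b = Buffer::from("cargo build");
        // Plain `==` now works where the removed `equals` method was needed before.
        assert!(a == b);
        // From<Buffer> for String replaces collecting the chars by hand.
        let s: String = a.into();
        assert!(s == "cargo build");
    }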
src/complete.rs

@@ -31,12 +31,24 @@ impl Completer for BasicCompleter {
 
 pub struct FilenameCompleter {
     working_dir: Option<PathBuf>,
+    case_sensitive: bool,
 }
 
 impl FilenameCompleter {
     pub fn new<T: Into<PathBuf>>(working_dir: Option<T>) -> Self {
         FilenameCompleter {
             working_dir: working_dir.map(|p| p.into()),
+            case_sensitive: true,
         }
     }
+
+    pub fn with_case_sensitivity<T: Into<PathBuf>>(
+        working_dir: Option<T>,
+        case_sensitive: bool,
+    ) -> Self {
+        FilenameCompleter {
+            working_dir: working_dir.map(|p| p.into()),
+            case_sensitive,
+        }
+    }
 }
@@ -77,7 +89,13 @@ impl Completer for FilenameCompleter {
                 && !full_path.ends_with("..") =>
             {
                 p = parent;
-                start_name = full_path.file_name().unwrap().to_string_lossy();
+                start_name = if self.case_sensitive {
+                    full_path.file_name().unwrap().to_string_lossy()
+                } else {
+                    let sn = full_path.file_name().unwrap().to_string_lossy();
+                    sn.to_lowercase();
+                    sn
+                };
                 completing_dir = false;
             }
             _ => {
@@ -100,7 +118,11 @@ impl Completer for FilenameCompleter {
                 Err(_) => continue,
             };
             let file_name = dir.file_name();
-            let file_name = file_name.to_string_lossy();
+            let file_name = if self.case_sensitive {
+                file_name.to_string_lossy().to_string()
+            } else {
+                file_name.to_string_lossy().to_lowercase()
+            };
 
             if start_name.is_empty() || file_name.starts_with(&*start_name) {
                 let mut a = start_path.clone();
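The completer now carries a `case_sensitive` flag: `FilenameCompleter::new` keeps the old case-sensitive behaviour, while the new `with_case_sensitivity` constructor lets callers opt out. A small usage sketch (not from the commit; it assumes `FilenameCompleter` is re-exported at the crate root):

    use liner::FilenameCompleter;

    fn main() {
        // Old behaviour: case-sensitive matching against file names.
        let _strict = FilenameCompleter::new(Some("/tmp"));
        // New in this commit: opt into case-insensitive matching.
        let _relaxed = FilenameCompleter::with_case_sensitivity(Some("/tmp"), false);
    }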
src/editor.rs

@@ -106,6 +106,10 @@ pub struct Editor<'a, W: Write> {
     // Buffer for the new line (ie. not from editing history)
     new_buf: Buffer,
 
+    // Buffer to use when editing history so we do not overwrite it.
+    hist_buf: Buffer,
+    hist_buf_valid: bool,
+
     // None if we're on the new buffer, else the index of history
     cur_history_loc: Option<usize>,
@@ -134,26 +138,30 @@ pub struct Editor<'a, W: Write> {
     history_subset_loc: Option<usize>,
     autosuggestion: Option<Buffer>,
+    history_fresh: bool,
 }
 
 macro_rules! cur_buf_mut {
-    ($s:expr) => {
+    ($s:expr) => {{
+        $s.buffer_changed = true;
         match $s.cur_history_loc {
             Some(i) => {
-                $s.buffer_changed = true;
-                &mut $s.context.history[i]
-            }
-            _ => {
-                $s.buffer_changed = true;
-                &mut $s.new_buf
+                if !$s.hist_buf_valid {
+                    $s.hist_buf.copy_buffer(&$s.context.history[i]);
+                    $s.hist_buf_valid = true;
+                }
+                &mut $s.hist_buf
             }
+            _ => &mut $s.new_buf,
         }
-    };
+    }};
 }
 
 macro_rules! cur_buf {
     ($s:expr) => {
         match $s.cur_history_loc {
+            Some(_) if $s.hist_buf_valid => &$s.hist_buf,
             Some(i) => &$s.context.history[i],
             _ => &$s.new_buf,
         }
@@ -183,6 +191,8 @@ impl<'a, W: Write> Editor<'a, W> {
             out: out,
             closure: f,
             new_buf: buffer.into(),
+            hist_buf: Buffer::new(),
+            hist_buf_valid: false,
             cur_history_loc: None,
             context: context,
             show_completions_hint: None,
@@ -196,6 +206,7 @@ impl<'a, W: Write> Editor<'a, W> {
             history_subset_index: vec![],
             history_subset_loc: None,
             autosuggestion: None,
+            history_fresh: false,
         };
 
         if !ed.new_buf.is_empty() {
@@ -242,6 +253,7 @@ impl<'a, W: Write> Editor<'a, W> {
     // XXX: Returning a bool to indicate doneness is a bit awkward, maybe change it
     pub fn handle_newline(&mut self) -> io::Result<bool> {
+        self.history_fresh = false;
         if self.is_search() {
             self.accept_autosuggestion()?;
         }
@@ -275,6 +287,13 @@ impl<'a, W: Write> Editor<'a, W> {
         }
     }
 
+    fn freshen_history(&mut self) {
+        if self.context.history.share && !self.history_fresh {
+            let _ = self.context.history.load_history(false);
+            self.history_fresh = true;
+        }
+    }
+
     /// Refresh incremental search, either when started or when the buffer changes.
     fn refresh_search(&mut self, forward: bool) {
         let search_history_loc = self.search_history_loc();
@@ -304,6 +323,7 @@ impl<'a, W: Write> Editor<'a, W> {
         self.reverse_search = !forward;
         self.forward_search = forward;
         self.cur_history_loc = None;
+        self.hist_buf_valid = false;
         self.no_newline = true;
         self.buffer_changed = false;
     }
@@ -314,6 +334,7 @@ impl<'a, W: Write> Editor<'a, W> {
     /// search with forward changed (i.e. reverse search direction for one result).
     pub fn search(&mut self, forward: bool) -> io::Result<()> {
         if !self.is_search() {
+            self.freshen_history();
             self.refresh_search(forward);
         } else if self.history_subset_index.len() > 0 {
             self.history_subset_loc = if let Some(p) = self.history_subset_loc {
@@ -430,10 +451,16 @@ impl<'a, W: Write> Editor<'a, W> {
     pub fn complete<T: Completer>(&mut self, handler: &mut T) -> io::Result<()> {
         handler.on_event(Event::new(self, EventKind::BeforeComplete));
-        if let Some((completions, i)) = self.show_completions_hint.take() {
-            let i = i.map_or(0, |i| (i + 1) % completions.len());
+        if let Some((completions, i_in)) = self.show_completions_hint.take() {
+            let i = i_in.map_or(0, |i| (i + 1) % completions.len());
 
-            self.delete_word_before_cursor(false)?;
+            match i_in {
+                Some(x) if cur_buf!(self) == &Buffer::from(&completions[x][..]) => {
+                    cur_buf_mut!(self).truncate(0);
+                    self.cursor = 0;
+                }
+                _ => self.delete_word_before_cursor(false)?,
+            }
             self.insert_str_after_cursor(&completions[i])?;
             self.show_completions_hint = Some((completions, Some(i)));
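The hunk above changes tab-completion cycling: `i_in` remembers the previously shown candidate, the next index is `(i + 1) % completions.len()`, and if the buffer already equals the current candidate the whole buffer is cleared instead of just deleting the word before the cursor. A standalone sketch of the wrap-around rule only (illustration, not liner code):

    fn next_completion(current: Option<usize>, len: usize) -> usize {
        current.map_or(0, |i| (i + 1) % len)
    }

    fn main() {
        let candidates = ["src/", "Cargo.toml", "README.md"];
        let mut selected = None;
        for _ in 0..4 {
            let i = next_completion(selected, candidates.len());
            println!("{}", candidates[i]);
            selected = Some(i);
        }
        // Prints src/, Cargo.toml, README.md, then wraps back to src/.
    }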
@@ -550,6 +577,8 @@ impl<'a, W: Write> Editor<'a, W> {
         if self.is_search() {
             self.search(false)
         } else {
+            self.hist_buf_valid = false;
+            self.freshen_history();
             if self.new_buf.num_chars() > 0 {
                 match self.history_subset_loc {
                     Some(i) if i > 0 => {
@@ -586,6 +615,7 @@ impl<'a, W: Write> Editor<'a, W> {
         if self.is_search() {
             self.search(true)
         } else {
+            self.hist_buf_valid = false;
             if self.new_buf.num_chars() > 0 {
                 if let Some(i) = self.history_subset_loc {
                     if i < self.history_subset_index.len() - 1 {
@@ -595,6 +625,7 @@ impl<'a, W: Write> Editor<'a, W> {
                     self.cur_history_loc = None;
                     self.history_subset_loc = None;
                     self.history_subset_index.clear();
+                    self.history_fresh = false;
                 }
             }
         } else {
@@ -602,7 +633,7 @@ impl<'a, W: Write> Editor<'a, W> {
                 Some(i) if i < self.context.history.len() - 1 => {
                     self.cur_history_loc = Some(i + 1)
                 }
-                _ => (),
+                _ => self.history_fresh = false,
             }
         }
         self.move_cursor_to_end_of_line()
@@ -611,6 +642,7 @@ impl<'a, W: Write> Editor<'a, W> {
     /// Moves to the start of history (ie. the earliest history entry).
     pub fn move_to_start_of_history(&mut self) -> io::Result<()> {
+        self.hist_buf_valid = false;
         if self.context.history.len() > 0 {
             self.cur_history_loc = Some(0);
             self.move_cursor_to_end_of_line()
@@ -623,6 +655,7 @@ impl<'a, W: Write> Editor<'a, W> {
     /// Moves to the end of history (ie. the new buffer).
     pub fn move_to_end_of_history(&mut self) -> io::Result<()> {
+        self.hist_buf_valid = false;
         if self.cur_history_loc.is_some() {
             self.cur_history_loc = None;
             self.move_cursor_to_end_of_line()
@@ -779,7 +812,6 @@ impl<'a, W: Write> Editor<'a, W> {
     /// Moves the cursor to the end of the line.
     pub fn move_cursor_to_end_of_line(&mut self) -> io::Result<()> {
-        //self.clear_search();
         self.cursor = cur_buf!(self).num_chars();
         self.no_newline = true;
         self.display()
@@ -828,6 +860,10 @@ impl<'a, W: Write> Editor<'a, W> {
     /// searching the first history entry to start with current text (reverse order).
     /// Return None if nothing found.
     fn current_autosuggestion(&mut self) -> Option<Buffer> {
+        // If we are editing a previous history item no autosuggestion.
+        if self.hist_buf_valid {
+            return None;
+        }
         let context_history = &self.context.history;
         let autosuggestion = if self.is_search() {
             self.search_history_loc().map(|i| &context_history[i])
@@ -1088,7 +1124,13 @@ impl<'a, W: Write> Editor<'a, W> {
 impl<'a, W: Write> From<Editor<'a, W>> for String {
     fn from(ed: Editor<'a, W>) -> String {
         match ed.cur_history_loc {
-            Some(i) => ed.context.history[i].clone(),
+            Some(i) => {
+                if ed.hist_buf_valid {
+                    ed.hist_buf
+                } else {
+                    ed.context.history[i].clone()
+                }
+            }
             _ => ed.new_buf,
         }
         .into()
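The editor changes all serve one idea: editing a recalled history entry must not mutate the stored history. `cur_buf_mut!` now lazily copies the entry into `hist_buf` on first modification and sets `hist_buf_valid`, and every history-navigation path resets that flag. A standalone illustration of this copy-on-first-edit pattern (not liner's actual types):

    struct Scratch {
        history: Vec<String>,
        hist_buf: String,
        hist_buf_valid: bool,
    }

    impl Scratch {
        fn edit_entry(&mut self, i: usize) -> &mut String {
            if !self.hist_buf_valid {
                // Copy the stored entry only on the first edit.
                self.hist_buf = self.history[i].clone();
                self.hist_buf_valid = true;
            }
            &mut self.hist_buf
        }
    }

    fn main() {
        let mut s = Scratch {
            history: vec!["ls -l".to_string()],
            hist_buf: String::new(),
            hist_buf_valid: false,
        };
        s.edit_entry(0).push_str(" /home");
        // The stored history entry is untouched; only the scratch copy changed.
        assert!(s.history[0] == "ls -l");
        assert!(s.hist_buf == "ls -l /home");
    }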
src/history.rs

@@ -29,6 +29,16 @@ pub struct History {
     max_file_size: usize,
     // TODO set from environment variable?
     pub append_duplicate_entries: bool,
+    /// Append each entry to history file as entered?
+    pub inc_append: bool,
+    /// Share history across ion's with the same history file (combine with inc_append).
+    pub share: bool,
+    /// Last filesize of history file, used to optimize history sharing.
+    pub file_size: u64,
+    /// Allow loading duplicate entries, need to know this for loading history files.
+    pub load_duplicates: bool,
+    /// Writes between history compaction.
+    compaction_writes: usize,
 }
 
 impl History {
@@ -40,36 +50,162 @@ impl History {
             max_buffers_size: DEFAULT_MAX_SIZE,
             max_file_size: DEFAULT_MAX_SIZE,
             append_duplicate_entries: false,
+            inc_append: false,
+            share: false,
+            file_size: 0,
+            load_duplicates: true,
+            compaction_writes: 0,
         }
     }
 
-    /// Set history file name and at the same time load the history.
-    pub fn set_file_name_and_load_history<P: AsRef<Path>>(
+    /// Clears out the history.
+    pub fn clear_history(&mut self) {
+        self.buffers.clear();
+    }
+
+    /// Loads the history file from the saved path and appends it to the end of the history if append
+    /// is true otherwise replace history.
+    pub fn load_history(&mut self, append: bool) -> io::Result<u64> {
+        if let Some(path) = self.file_name.clone() {
+            let file_size = self.file_size;
+            self.load_history_file_test(&path, file_size, append)
+                .map(|l| {
+                    self.file_size = l;
+                    l
+                })
+        } else {
+            Err(io::Error::new(
+                io::ErrorKind::Other,
+                "History filename not set!",
+            ))
+        }
+    }
+
+    /// Loads the history file from path and appends it to the end of the history if append is true.
+    pub fn load_history_file<P: AsRef<Path>>(&mut self, path: P, append: bool) -> io::Result<u64> {
+        self.load_history_file_test(path, 0, append)
+    }
+
+    /// Loads the history file from path and appends it to the end of the history.f append is true
+    /// (replaces if false). Only loads if length is not equal to current file size.
+    fn load_history_file_test<P: AsRef<Path>>(
+        &mut self,
+        path: P,
+        length: u64,
+        append: bool,
+    ) -> io::Result<u64> {
+        let path = path.as_ref();
+        let file = if path.exists() {
+            File::open(path)?
+        } else {
+            let status = format!("File not found {:?}", path);
+            return Err(io::Error::new(io::ErrorKind::Other, status));
+        };
+        let new_length = file.metadata()?.len();
+        if new_length == 0 && length == 0 && !append {
+            // Special case, trying to load nothing and not appending- just clear.
+            self.clear_history();
+        }
+        if new_length != length {
+            if !append {
+                self.clear_history();
+            }
+            let reader = BufReader::new(file);
+            for line in reader.lines() {
+                match line {
+                    Ok(line) => {
+                        if !line.starts_with('#') {
+                            self.buffers.push_back(Buffer::from(line));
+                        }
+                    }
+                    Err(_) => break,
+                }
+            }
+            self.to_max_size();
+            if !self.load_duplicates {
+                let mut tmp_buffers: Vec<Buffer> = Vec::with_capacity(self.buffers.len());
+                // Remove duplicates from loaded history if we do not want it.
+                while let Some(buf) = self.buffers.pop_back() {
+                    self.remove_duplicates(&buf.to_string()[..]);
+                    tmp_buffers.push(buf);
+                }
+                while let Some(buf) = tmp_buffers.pop() {
+                    self.buffers.push_back(buf);
+                }
+            }
+        }
+        Ok(new_length)
+    }
+
+    /// Removes duplicates and trims a history file to max_file_size.
+    /// Primarily if inc_append is set without shared history.
+    /// Static because it should have no side effects on a history object.
+    fn deduplicate_history_file<P: AsRef<Path>>(
+        path: P,
+        max_file_size: usize,
+    ) -> io::Result<String> {
-        let status;
         let path = path.as_ref();
         let file = if path.exists() {
-            status = format!("opening {:?}", path);
             File::open(path)?
         } else {
-            status = format!("creating {:?}", path);
-            File::create(path)?
+            let status = format!("File not found {:?}", path);
+            return Err(io::Error::new(io::ErrorKind::Other, status));
         };
+        let mut buf: VecDeque<String> = VecDeque::new();
         let reader = BufReader::new(file);
         for line in reader.lines() {
             match line {
                 Ok(line) => {
                     if !line.starts_with('#') {
-                        self.buffers.push_back(Buffer::from(line));
+                        buf.push_back(line);
                     }
                 }
                 Err(_) => break,
             }
         }
+        let org_length = buf.len();
+        if buf.len() >= max_file_size {
+            let pop_out = buf.len() - max_file_size;
+            for _ in 0..pop_out {
+                buf.pop_front();
+            }
+        }
+        let mut tmp_buffers: Vec<String> = Vec::with_capacity(buf.len());
+        // Remove duplicates from loaded history if we do not want it.
+        while let Some(line) = buf.pop_back() {
+            buf.retain(|buffer| *buffer != line);
+            tmp_buffers.push(line);
+        }
+        while let Some(line) = tmp_buffers.pop() {
+            buf.push_back(line);
+        }
+        if org_length != buf.len() {
+            // Overwrite the history file with the deduplicated version if it changed.
+            let mut file = BufWriter::new(File::create(&path)?);
+            // Write the commands to the history file.
+            for command in buf.into_iter() {
+                let _ = file.write_all(&String::from(command).as_bytes());
+                let _ = file.write_all(b"\n");
+            }
+        }
+        Ok("De-duplicated history file.".to_string())
+    }
+
+    /// Set history file name and at the same time load the history.
+    pub fn set_file_name_and_load_history<P: AsRef<Path>>(&mut self, path: P) -> io::Result<u64> {
+        let path = path.as_ref();
         self.file_name = path.to_str().map(|s| s.to_owned());
-        Ok(status)
+        self.file_size = 0;
+        if path.exists() {
+            self.load_history_file(path, false).map(|l| {
+                self.file_size = l;
+                l
+            })
+        } else {
+            File::create(path)?;
+            Ok(0)
+        }
     }
 
     /// Set maximal number of buffers stored in memory
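These new `History` fields and loaders are what the editor's `freshen_history` relies on: with `inc_append` each accepted line is appended to the file immediately, with `share` the file is reloaded so concurrent shells see each other's entries, and `load_duplicates = false` de-duplicates on load and compaction. A hedged sketch of how a caller might wire this up (the file path is made up; `con` is assumed to be a `liner::Context`, whose `history` field appears throughout this diff):

    use liner::Context;

    fn setup_history(con: &mut Context) -> std::io::Result<u64> {
        con.history.inc_append = true;       // append each entry as it is entered
        con.history.share = true;            // reload the file so shells sharing it stay in sync
        con.history.load_duplicates = false; // drop duplicates when loading / compacting
        con.history.set_file_name_and_load_history("/tmp/liner_history")
    }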
@@ -100,10 +236,55 @@ impl History {
             return Ok(());
         }
 
+        let item_str = String::from(new_item.clone());
         self.buffers.push_back(new_item);
         //self.to_max_size();
         while self.buffers.len() > self.max_buffers_size {
             self.buffers.pop_front();
         }
+        if self.inc_append && self.file_name.is_some() {
+            if !self.load_duplicates {
+                // Do not want duplicates so periodically compact the history file.
+                self.compaction_writes += 1;
+                // Every 30 writes "compact" the history file by writing just in memory history. This
+                // is to keep the history file clean and at a reasonable size (not much over max
+                // history size at it's worst).
+                if self.compaction_writes > 29 {
+                    if self.share {
+                        // Reload history, we may be out of sync.
+                        let _ = self.load_history(false);
+                        // Commit the duplicated history.
+                        if let Some(file_name) = self.file_name.clone() {
+                            let _ = self.overwrite_history(file_name);
+                        }
+                    } else {
+                        // Not using shared history so just de-dup the file without messing with
+                        // our history.
+                        if let Some(file_name) = self.file_name.clone() {
+                            let _ = History::deduplicate_history_file(file_name, self.max_file_size);
+                        }
+                    }
+                    self.compaction_writes = 0;
+                }
+            } else {
+                // If allowing duplicates then no need for compaction.
+                self.compaction_writes = 1;
+            }
+            let file_name = self.file_name.clone().unwrap();
+            if let Ok(inner_file) = std::fs::OpenOptions::new().append(true).open(&file_name) {
+                // Leave file size alone, if it is not right trigger a reload later.
+                if self.compaction_writes > 0 {
+                    // If 0 we "compacted" and nothing to write.
+                    let mut file = BufWriter::new(inner_file);
+                    let _ = file.write_all(&item_str.as_bytes());
+                    let _ = file.write_all(b"\n");
+                    // Save the filesize after each append so we do not reload when we do not need to.
+                    self.file_size += item_str.len() as u64 + 1;
+                }
+            }
+        }
         Ok(())
     }
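The comments above describe the compaction policy for incremental appends: count writes in `compaction_writes` and, every 30th write, rewrite or de-duplicate the file instead of letting it grow. A minimal standalone sketch of that counter logic (illustration only, not liner code):

    struct Compactor {
        compaction_writes: usize,
    }

    impl Compactor {
        /// Returns true when the history file should be rewritten instead of appended to.
        fn record_write(&mut self) -> bool {
            self.compaction_writes += 1;
            if self.compaction_writes > 29 {
                self.compaction_writes = 0;
                true
            } else {
                false
            }
        }
    }

    fn main() {
        let mut c = Compactor { compaction_writes: 0 };
        let compactions = (0..90).filter(|_| c.record_write()).count();
        assert!(compactions == 3); // one compaction per 30 incremental writes
    }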
@@ -150,7 +331,7 @@ impl History {
                 if starts {
                     v.push(*i);
                 }
-                if contains && !starts && !tested.equals(search_term) {
+                if contains && !starts && tested != search_term {
                     return true;
                 }
             }
@@ -175,28 +356,40 @@ impl History {
         self.file_name.as_ref().map(|s| s.as_str())
     }
 
     pub fn commit_to_file(&mut self) {
         if let Some(file_name) = self.file_name.clone() {
             // Find how many bytes we need to move backwards
             // in the file to remove all the old commands.
             if self.buffers.len() >= self.max_file_size {
                 let pop_out = self.buffers.len() - self.max_file_size;
                 for _ in 0..pop_out {
                     self.buffers.pop_front();