Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -99,10 +99,12 @@ let tests =
"購入する", Position(0, 0), "購入", "cjk"
"配送についての質問", Position(0, 0), "配送", "cjk"
"注文と配送", Position(0, 0), "注文", "cjk"
"注文と配送", Position(0, 0), "配送", "cjk"
"注文と配送", Position(0, 3), "配送", "cjk"
"オーダー", Position(0, 0), "注文", "cjk"
"购物车", Position(0, 0), "购物车", "cjk"
"사용자", Position(0, 0), "사용자", "cjk" ]
"사용자", Position(0, 0), "사용자", "cjk"
"注文が届く", Position(0, 1), "注文", "cjk"
"注文と配送", Position(0, 4), "配送", "cjk" ]
|> List.map testHoverTermFoundWithDefaultGlossary
|> testList "CJK term found when hovering via substring matching"

Expand Down Expand Up @@ -192,7 +194,10 @@ let tests =
"peere", Position(0, 0), "three"
"Something", Position(0, 0), "empty_terms_list"
"料理", Position(0, 0), "cjk"
"ユーザー", Position(0, 0), "cjk" ]
"ユーザー", Position(0, 0), "cjk"
"注文が届く", Position(0, 2), "cjk"
"注文が届く", Position(0, 3), "cjk"
"注文と配送", Position(0, 2), "cjk" ]
|> List.map testHoverTermNotFound
|> testList "Nothing found when hovering"

Expand Down Expand Up @@ -237,7 +242,7 @@ let tests =

testAsync "Test hover with context info and no match" {
let terms = []
let foundToken = Some "term"
let foundToken = Some("term", 0)

let hoverHandler =
Hover.handler
Expand All @@ -248,7 +253,7 @@ let tests =
(fun _ _ -> foundToken)

let hoverParams =
HoverParams(TextDocument = TextDocumentItem(Uri = System.Uri("file:///blah")))
HoverParams(TextDocument = TextDocumentItem(Uri = System.Uri("file:///blah")), Position = Position(0, 0))

let! result = hoverHandler hoverParams null null |> Async.AwaitTask

Expand Down
28 changes: 17 additions & 11 deletions src/language-server/Contextive.LanguageServer/Hover.fs
Original file line number Diff line number Diff line change
Expand Up @@ -67,22 +67,25 @@ module private Filtering =
// Both token and keys are normalised with simpleNormalize (NFKD + lowercase +
// Singularize). Singularize is a no-op for CJK text in Humanizer, so the
// normalisation is effectively NFKD + lowercase on both sides.
let findMatchingTermsBySubstring (context: GlossaryFile.Context) (token: string) =
// cursorOffsetInToken restricts matches to keys that span the cursor position.
let findMatchingTermsBySubstring (context: GlossaryFile.Context) (token: string) (cursorOffsetInToken: int) =
let normalizedToken = Normalization.simpleNormalize token

context.Index.Keys
|> Seq.filter (fun key -> normalizedToken.Contains(key))
|> Seq.filter (fun key ->
let idx = normalizedToken.IndexOf(key)
idx >= 0 && cursorOffsetInToken >= idx && cursorOffsetInToken < idx + key.Length)
|> Seq.collect (fun key -> context.Index[key])
|> Seq.distinctBy (fun t -> t.Name)

let termFilterForCandidateTermsWithIndex tokenAndCandidateTerms =
let termFilterForCandidateTermsWithIndex cursorOffsetInToken tokenAndCandidateTerms =
Seq.map (fun (c: GlossaryFile.Context) ->

let token = tokenAndCandidateTerms |> Seq.head |> fst

let terms =
if CandidateTerms.containsCJK token then
findMatchingTermsBySubstring c token
findMatchingTermsBySubstring c token cursorOffsetInToken
else
findMatchingTermsInIndex c tokenAndCandidateTerms

Expand All @@ -92,7 +95,7 @@ module private Filtering =

module private TextDocument =

let getTokenAtPosition (p: HoverParams) (tokenFinder: TextDocument.TokenFinder) =
let getTokenWithStartAtPosition (p: HoverParams) (tokenFinder: DocumentUri -> Position -> (string * int) option) =
match p.TextDocument with
| null -> None
| document -> tokenFinder document.Uri p.Position
Expand All @@ -111,10 +114,11 @@ let private hoverResult (contexts: GlossaryFile.FindResult) =
let private hoverContentForToken
(uri: string)
(termFinder: GlossaryFile.Finder)
(cursorOffsetInToken: int)
(tokensAndCandidateTerms: CandidateTerms.TokenAndCandidateTerms seq)
=
async {
let! findResult = termFinder uri (Filtering.termFilterForCandidateTermsWithIndex tokensAndCandidateTerms)
let! findResult = termFinder uri (Filtering.termFilterForCandidateTermsWithIndex cursorOffsetInToken tokensAndCandidateTerms)

return
if Seq.isEmpty findResult then
Expand All @@ -125,16 +129,18 @@ let private hoverContentForToken

let handler
(termFinder: GlossaryFile.Finder)
(tokenFinder: TextDocument.TokenFinder)
(tokenFinder: DocumentUri -> Position -> (string * int) option)
(p: HoverParams)
(_: HoverCapability)
_
=
async {
return!
match TextDocument.getTokenAtPosition p tokenFinder with
match TextDocument.getTokenWithStartAtPosition p tokenFinder with
| None -> async { return Lsp.noHoverResult }
| tokenAtPosition ->
| Some(token, tokenStart) ->
let cursorOffsetInToken = p.Position.Character - tokenStart

let uriPath =
try
p.TextDocument.Uri.ToUri().LocalPath
Expand All @@ -146,9 +152,9 @@ let handler

dp

tokenAtPosition
Some token
|> CandidateTerms.tokenToTokenAndCandidateTerms
|> hoverContentForToken uriPath termFinder
|> hoverContentForToken uriPath termFinder cursorOffsetInToken
}
|> Async.StartAsTask

Expand Down
2 changes: 1 addition & 1 deletion src/language-server/Contextive.LanguageServer/Server.fs
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ let private configureServer (input: Stream) (output: Stream) (opts: LanguageServ
.OnHover(
Hover.handler
<| GlossaryManager.lookup glossaryManager
<| TextDocument.findToken,
<| TextDocument.findTokenWithStart,
Hover.registrationOptions
)

Expand Down
11 changes: 11 additions & 0 deletions src/language-server/Contextive.LanguageServer/TextDocument.fs
Original file line number Diff line number Diff line change
Expand Up @@ -27,13 +27,24 @@ let getTokenAtPosition (lines: IList<string>) (position: Position) =
|> Lexer.getEnd position.Character
|> Lexer.get

let getTokenWithStartAtPosition (lines: IList<string>) (position: Position) =
    // Locate the token under the cursor on the given line and return it
    // together with its start column, so callers can compute the cursor's
    // offset within the token.
    let line = Lexer.ofLine lines position.Line
    let bounded =
        line
        |> Lexer.getStart position.Character
        |> Lexer.getEnd position.Character
    Lexer.getWithStart bounded

type TokenFinder = DocumentUri -> Position -> string option

let findToken (documentUri: DocumentUri) (position: Position) =
    // Resolve the open document for this URI (if any), then extract the
    // token under the cursor. Absent document or token yields None.
    getDocument documentUri
    |> Option.bind (fun documentLines -> getTokenAtPosition documentLines position)

let findTokenWithStart (documentUri: DocumentUri) (position: Position) =
    // Like findToken, but also reports the token's start column so the
    // caller can work out where the cursor falls inside the token.
    getDocument documentUri
    |> Option.bind (fun documentLines -> getTokenWithStartAtPosition documentLines position)

let private linesFromText (document: string) : IList<string> =
    // Normalise every line ending to the platform default, then split on
    // that single separator so mixed \n / \r\n input yields uniform lines.
    let normalised = document.ReplaceLineEndings()
    normalised.Split(System.Environment.NewLine)

Expand Down
6 changes: 6 additions & 0 deletions src/language-server/Contextive.LanguageServer/Tokeniser.fs
Original file line number Diff line number Diff line change
Expand Up @@ -72,3 +72,9 @@ type Lexer =
function
| Token(line, start, _) as t when t.HasLength -> line.Substring(start, t.Length.Value) |> trim |> Some
| _ -> None

static member getWithStart =
    // Extract the token's trimmed text together with its start column
    // within the line; tokens without a length produce None.
    function
    | Token(line, start, _) as t when t.HasLength ->
        let text = line.Substring(start, t.Length.Value) |> trim
        Some(text, start)
    | _ -> None
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
contexts:
- name: CJK Demo
domainVisionStatement: To illustrate CJK substring matching in the contextive glossary.
terms:
- name: 注文
definition: An order placed by a customer.
aliases:
- オーダー
- name: 購入
definition: A purchase transaction.
- name: 配送
definition: Delivery of goods.
- name: 사용자
definition: A user of the system.
- name: 购物车
definition: Shopping cart.
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
注文が届く
購入する
配送についての質問
注文と配送
オーダー
购物车
사용자