Skip to content

Commit 8baddf7

Browse files
committed
Merge branch 'feature/codellama-fill-in-the-middle-strategy' into develop
2 parents 3ef796a + 1356a58 commit 8baddf7

9 files changed

Lines changed: 142 additions & 16 deletions

File tree

Core/Sources/CodeCompletionService/OllamaService.swift

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -221,6 +221,7 @@ extension OllamaService {
221221
))
222222
let prompts = strategy.createTruncatedPrompt(promptStrategy: request)
223223
return ([request.systemPrompt] + prompts.map(\.content)).joined(separator: "\n\n")
224+
.trimmingCharacters(in: .whitespacesAndNewlines)
224225
}
225226

226227
func sendPrompt(_ prompt: String) async throws -> ResponseStream<ChatCompletionResponseChunk> {

Core/Sources/SuggestionService/RawSuggestionPostProcessing/DefaultRawSuggestionPostProcessingStrategy.swift

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,10 @@ struct DefaultRawSuggestionPostProcessingStrategy: RawSuggestionPostProcessingSt
6666
openingTag: String,
6767
closingTag: String
6868
) -> String {
69+
guard !openingTag.isEmpty, !closingTag.isEmpty else {
70+
return response
71+
}
72+
6973
let case_openingTagAtTheStart_parseEverythingInsideTheTag = Parse(input: Substring.self) {
7074
openingTag
7175

Lines changed: 99 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,99 @@
1+
import CopilotForXcodeKit
2+
import Foundation
3+
import Fundamental
4+
5+
/// https://ollama.com/library/codellama
///
/// A Fill-in-the-Middle request strategy. It wraps the prompt in CodeLlama's
/// special infill tokens (`<PRE>`, `<SUF>`, `<MID>`) so that models with FIM
/// support can complete the code between the prefix and the suffix.
struct CodeLlamaFillInTheMiddleRequestStrategy: RequestStrategy {
    var sourceRequest: SuggestionRequest
    /// Lines of code before the cursor.
    var prefix: [String]
    /// Lines of code after the cursor.
    var suffix: [String]

    /// Skip the request when the line at the cursor is nothing but a closing
    /// brace — there is usually nothing meaningful to complete there.
    var shouldSkip: Bool {
        prefix.last?.trimmingCharacters(in: .whitespaces) == "}"
    }

    func createPrompt() -> Prompt {
        Prompt(
            sourceRequest: sourceRequest,
            prefix: prefix,
            suffix: suffix
        )
    }

    /// The raw model output is used as-is: a FIM model responds with only the
    /// middle section, so no tag stripping is needed.
    func createRawSuggestionPostProcessor() -> NoOpRawSuggestionPostProcessingStrategy {
        NoOpRawSuggestionPostProcessingStrategy()
    }

    /// The special infill tokens documented by CodeLlama.
    ///
    /// Note: `public` was removed from the members — it has no effect inside
    /// a type that is itself internal (member access is capped by the
    /// enclosing type's access level).
    enum Tag {
        static let prefix = "<PRE>"
        static let suffix = "<SUF>"
        static let middle = "<MID>"
    }

    struct Prompt: PromptStrategy {
        /// Empty by default. `fileprivate(set)` lets the with-system-prompt
        /// variant in this file install an instruction prompt.
        fileprivate(set) var systemPrompt: String = ""
        var sourceRequest: SuggestionRequest
        var prefix: [String]
        var suffix: [String]
        var filePath: String { sourceRequest.relativePath ?? sourceRequest.fileURL.path }
        var relevantCodeSnippets: [RelevantCodeSnippet] { sourceRequest.relevantCodeSnippets }
        /// Stop on a blank line or CodeLlama's end-of-text token.
        var stopWords: [String] { ["\n\n", "<EOT>"] }
        var language: CodeLanguage? { sourceRequest.language }

        var suggestionPrefix: SuggestionPrefix {
            guard let prefix = prefix.last else { return .empty }
            return .unchanged(prefix).curlyBracesLineBreak()
        }

        /// Builds a single user message of the form
        /// `<PRE> {metadata + snippets + prefix} <SUF>{suffix} <MID>`.
        /// When the suffix is empty, an `// End of file` marker is injected so
        /// the model knows there is nothing after the insertion point.
        func createPrompt(
            truncatedPrefix: [String],
            truncatedSuffix: [String],
            includedSnippets: [RelevantCodeSnippet]
        ) -> [PromptMessage] {
            let suffix = truncatedSuffix.joined()
            return [
                .init(
                    role: .user,
                    content: """
                    \(Tag.prefix) // File Path: \(filePath)
                    // Indentation: \
                    \(sourceRequest.indentSize) \
                    \(sourceRequest.usesTabsForIndentation ? "tab" : "space")
                    \(includedSnippets.map(\.content).joined(separator: "\n\n"))
                    \(truncatedPrefix.joined()) \
                    \(Tag.suffix)\(suffix.isEmpty ? "\n// End of file" : suffix) \
                    \(Tag.middle)
                    """.trimmingCharacters(in: .whitespacesAndNewlines)
                ),
            ]
        }
    }
}
72+
73+
/// A variant of `CodeLlamaFillInTheMiddleRequestStrategy` that additionally
/// sends a system prompt explaining the PRE/SUF/MID tags, for use with models
/// that don't natively support Fill-in-the-Middle.
struct CodeLlamaFillInTheMiddleWithSystemPromptRequestStrategy: RequestStrategy {
    let strategy: CodeLlamaFillInTheMiddleRequestStrategy

    init(sourceRequest: SuggestionRequest, prefix: [String], suffix: [String]) {
        strategy = .init(sourceRequest: sourceRequest, prefix: prefix, suffix: suffix)
    }

    func createPrompt() -> some PromptStrategy {
        var prompt = strategy.createPrompt()
        // Typo fix in the model-facing instruction: "senior programer who
        // take" -> "senior programmer who takes". Prompt wording is runtime
        // behavior, so only the misspellings were corrected.
        prompt.systemPrompt = """
        You are a senior programmer who takes the surrounding code and \
        references from the codebase into account in order to write high-quality code to \
        complete the code enclosed in the given code. \
        You only respond with code that works and fits seamlessly with surrounding code. \
        Don't include anything else beyond the code. \
        The prefix will follow the PRE tag and the suffix will follow the SUF tag. \
        You should write the code that fits seamlessly after the MID tag.
        """.trimmingCharacters(in: .whitespacesAndNewlines)

        return prompt
    }

    /// Uses the default post-processor with empty code tags. With empty tags
    /// the tag-extraction step is a no-op (it guards on non-empty tags), so
    /// only the remaining default cleanup applies — TODO confirm intent.
    func createRawSuggestionPostProcessor() -> some RawSuggestionPostProcessingStrategy {
        DefaultRawSuggestionPostProcessingStrategy(openingCodeTag: "", closingCodeTag: "")
    }
}
99+

Core/Sources/SuggestionService/RequestStrategies/ContinueRequestStrategy.swift

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ struct ContinueRequestStrategy: RequestStrategy {
5454
```
5555
o World)
5656
```
57-
"""
57+
""".trimmingCharacters(in: .whitespacesAndNewlines)
5858
var sourceRequest: SuggestionRequest
5959
var prefix: [String]
6060
var suffix: [String]
@@ -113,18 +113,18 @@ struct ContinueRequestStrategy: RequestStrategy {
113113
```
114114
115115
Complete code inside \(Tag.openingCode)
116-
""")
116+
""".trimmingCharacters(in: .whitespacesAndNewlines))
117117

118118
let mockResponse = PromptMessage(role: .assistant, content: """
119119
\(Tag.openingCode)\(infillBlock)
120-
""")
120+
""".trimmingCharacters(in: .whitespacesAndNewlines))
121121

122122
let continuePrompt = PromptMessage(role: .user, content: """
123123
Continue generating. \
124124
Don't duplicate existing implementations. \
125125
Don't try to fix what was written. \
126126
Don't worry about typos.
127-
""")
127+
""".trimmingCharacters(in: .whitespacesAndNewlines))
128128

129129
return [
130130
initialPrompt,

Core/Sources/SuggestionService/RequestStrategies/DefaultRequestStrategy.swift

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ struct DefaultRequestStrategy: RequestStrategy {
5757
###
5858
World")\(Tag.closingCode)
5959
###
60-
"""
60+
""".trimmingCharacters(in: .whitespacesAndNewlines)
6161
var sourceRequest: SuggestionRequest
6262
var prefix: [String]
6363
var suffix: [String]
@@ -114,7 +114,7 @@ struct DefaultRequestStrategy: RequestStrategy {
114114
Complete code inside \(Tag.openingCode):
115115
116116
\(Tag.openingCode)\(infillBlock)
117-
"""
117+
""".trimmingCharacters(in: .whitespacesAndNewlines)
118118
}
119119

120120
static func createSnippetsPrompt(includedSnippets: [RelevantCodeSnippet]) -> String {

Core/Sources/SuggestionService/RequestStrategies/NaiveRequestStrategy.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ struct NaiveRequestStrategy: RequestStrategy {
7575
---
7676
7777
\(code)
78-
""")]
78+
""".trimmingCharacters(in: .whitespacesAndNewlines))]
7979
}
8080
}
8181
}

Core/Sources/SuggestionService/RequestStrategy.swift

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,8 @@ public enum RequestStrategyOption: String, CaseIterable, Codable {
2626
case `default` = ""
2727
case naive
2828
case `continue`
29+
case codeLlamaFillInTheMiddle
30+
case codeLlamaFillInTheMiddleWithSystemPrompt
2931
}
3032

3133
extension RequestStrategyOption {
@@ -37,6 +39,10 @@ extension RequestStrategyOption {
3739
return NaiveRequestStrategy.self
3840
case .continue:
3941
return ContinueRequestStrategy.self
42+
case .codeLlamaFillInTheMiddle:
43+
return CodeLlamaFillInTheMiddleRequestStrategy.self
44+
case .codeLlamaFillInTheMiddleWithSystemPrompt:
45+
return CodeLlamaFillInTheMiddleWithSystemPromptRequestStrategy.self
4046
}
4147
}
4248
}

CustomSuggestionService/ContentView.swift

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -177,6 +177,14 @@ struct RequestStrategyPicker: View {
177177
Text("Naive").tag(option.rawValue)
178178
case .continue:
179179
Text("Continue").tag(option.rawValue)
180+
case .codeLlamaFillInTheMiddle:
181+
Text(
182+
"CodeLlama Fill-in-the-Middle (Good for Codellama:xb-code and other models with Fill-in-the-Middle support)"
183+
)
184+
.tag(option.rawValue)
185+
case .codeLlamaFillInTheMiddleWithSystemPrompt:
186+
Text("CodeLlama Fill-in-the-Middle with System Prompt")
187+
.tag(option.rawValue)
180188
}
181189
}
182190
}

README.md

Lines changed: 17 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# Custom Suggestion Service for Copilot for Xcode
1+
# Custom Suggestion Service for Copilot for Xcode
22

33
This extension offers a custom suggestion service for [Copilot for Xcode](https://github.com/intitni/CopilotForXcode), allowing you to leverage a chat model to enhance the suggestions provided as you write code.
44

@@ -27,31 +27,39 @@ The app supports three types of suggestion services:
2727
- Models with completions API
2828
- [Tabby](https://tabby.tabbyml.com)
2929

30-
It is recommended to use Tabby since they have extensive experience in crafting prompts.
30+
If you are new to running a model locally, you can try [LM Studio](https://lmstudio.ai).
31+
32+
### Recommended Settings
33+
34+
- Use Tabby since they have extensive experience in code completion.
35+
- Use models with a completions API and Fill-in-the-Middle support (for example, codellama:7b-code), and use the "CodeLlama Fill-in-the-Middle" strategy.
36+
37+
### Others
3138

32-
If you choose not to use Tabby, it is advisable to use a custom model with the completions API and employ the default request strategy.
39+
In other situations, it is advisable to use a custom model with the completions API over a chat completions API, and employ the default request strategy.
3340

3441
Ensure that the prompt format remains as simple as the following:
3542

36-
```
43+
```
3744
{System}
3845
{User}
3946
{Assistant}
4047
```
4148

42-
If you are new to running a model locally, you can try [LM Studio](https://lmstudio.ai).
43-
4449
## Strategies
4550

4651
- Default: This strategy meticulously explains the context to the model, prompting it to generate a suggestion.
4752
- Naive: This strategy rearranges the code in a naive way to trick the model into believing it's appending code at the end of a file.
4853
- Continue: This strategy employs the "Please Continue" technique to persuade the model that it has started a suggestion and must continue to complete it. (Only effective with the chat completion API).
54+
- CodeLlama Fill-in-the-Middle: This strategy uses the special infill tokens documented by CodeLlama to guide the model in generating suggestions. The model needs to support FIM for this to work (codellama:xb-code, StarCoder, etc.).
55+
- CodeLlama Fill-in-the-Middle with System Prompt: Unlike the previous strategy, this one includes a system prompt telling the model what to do. You can try it with models that don't support FIM.
4956

5057
## Contribution
5158

52-
Prompt engineering is a challenging task, and your assistance is invaluable.
59+
Prompt engineering is a challenging task, and your assistance is invaluable.
5360

54-
The most complex things are located within the `Core` package.
61+
The most complex things are located within the `Core` package.
5562

56-
- To add a new service, please refer to the `CodeCompletionService` folder.
63+
- To add a new service, please refer to the `CodeCompletionService` folder.
5764
- To add new request strategies, check out the `SuggestionService` folder.
65+

0 commit comments

Comments
 (0)